#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Control Plane Analyzer
控制面分析器

功能：
1. 解析控制面pcap包
2. 提取信令流程数据
3. 检测控制面异常
4. 可集成外部大模型分析结果（可选）

作者：Falcomm自研团队
版本：2.0
日期：2025-10-22
"""

import sys
import json
import argparse
from pathlib import Path
from datetime import datetime
from collections import defaultdict
import re

class ControlPlaneAnalyzer:
    """Control-plane pcap analyzer.

    Responsibilities:
    1. Parse control-plane pcap captures (scapy-based, best effort).
    2. Extract signaling flows (SIP / S1AP / GTP / S1-MME / S1-U).
    3. Detect control-plane anomalies.
    4. Optionally merge results from an external large-model ("Ziyan")
       analysis bundle.
    """

    def __init__(self):
        # flow_id -> flow description dict (stages, messages, statistics, ...)
        self.signaling_flows = {}
        # Flat list of every detected anomaly dict.
        self.anomalies = []
        self.statistics = {}
        # Optional external large-model analysis result, loaded on demand.
        self.ziyan_analysis = None

    def analyze_pcap_files(self, pcap_files, output_dir, ziyan_analysis_path=None):
        """Analyze control-plane pcap files and write a merged JSON report.

        Args:
            pcap_files: iterable of pcap file paths.
            output_dir: directory that receives control_plane_merged_analysis.json.
            ziyan_analysis_path: optional root directory of an external
                ("Ziyan") analysis bundle to merge into the report.

        Returns:
            True. Per-file parse failures are logged and do not abort the run.
        """
        output_dir = Path(output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)

        print(f"Starting control plane analysis for {len(pcap_files)} files")

        # Load the external large-model analysis result (optional).
        if ziyan_analysis_path:
            self.ziyan_analysis = self._load_ziyan_analysis(ziyan_analysis_path)

        # Analyze each pcap file; results accumulate on self.
        for pcap_file in pcap_files:
            self._analyze_single_pcap(pcap_file)

        # Earliest timestamp among message types treated as anomaly indicators.
        first_anomaly_time = None
        for flow in self.signaling_flows.values():
            for msg in flow.get('signaling_messages', []):
                if msg.get('message_type') in ['BYE', 'REGISTER', 'TAU_REQUEST', 'HANDOVER_REQUEST']:
                    ts = msg.get('timestamp')
                    if ts is not None and (first_anomaly_time is None or ts < first_anomaly_time):
                        first_anomaly_time = ts

        # Build the merged analysis result.
        merged_analysis = self._generate_merged_analysis(pcap_files)
        if first_anomaly_time is not None:
            # datetime is imported at module level; the old redundant local
            # import was removed.
            merged_analysis.setdefault('control_plane_analysis', {})['first_anomaly_time'] = \
                datetime.fromtimestamp(first_anomaly_time).strftime('%Y-%m-%d %H:%M:%S.%f')

        # Persist the merged analysis as UTF-8 JSON (Chinese text kept as-is).
        output_file = output_dir / "control_plane_merged_analysis.json"
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(merged_analysis, f, indent=2, ensure_ascii=False)

        print(f"Control plane analysis completed: {output_file}")
        return True

    def _analyze_single_pcap(self, pcap_file):
        """Analyze one pcap file and register its flows/anomalies on self."""
        pcap_path = Path(pcap_file)
        print(f"Analyzing control plane pcap: {pcap_path.name}")

        # Parse the capture for per-packet signaling details.
        signaling_data = self._parse_pcap_signaling(pcap_path)

        # Classify the overall flow from the filename, then merge in the
        # parsed signaling data.
        flow_data = self._extract_signaling_flow_from_filename(pcap_path.name)
        if flow_data:
            flow_data.update(signaling_data)
            flow_id = f"{flow_data['flow_type']}_{len(self.signaling_flows) + 1}"
            self.signaling_flows[flow_id] = flow_data

            # Detect anomalies for this flow.
            anomalies = self._detect_control_anomalies(flow_data, pcap_path.name)
            self.anomalies.extend(anomalies)

        # Additionally recognize S1-MME and S1-U flows from protocol markers.
        s1_mme_flow = self._identify_s1_mme_flow(signaling_data.get("signaling_messages", []))
        if s1_mme_flow:
            flow_id = f"S1-MME_{len(self.signaling_flows) + 1}"
            self.signaling_flows[flow_id] = s1_mme_flow

        s1_u_flow = self._identify_s1_u_flow(signaling_data.get("signaling_messages", []))
        if s1_u_flow:
            flow_id = f"S1-U_{len(self.signaling_flows) + 1}"
            self.signaling_flows[flow_id] = s1_u_flow

    def _parse_pcap_signaling(self, pcap_path):
        """Parse signaling information from a pcap file.

        Returns a dict with total_packets, signaling_messages,
        protocol_stats, port_stats, anomaly_patterns (and identities on
        success). Any parse error — including scapy being unavailable —
        is logged and yields an empty result instead of raising.
        """
        try:
            # Imported lazily so the analyzer degrades gracefully when
            # scapy is not installed.
            from scapy.all import rdpcap, IP, UDP, TCP, Raw

            packets = rdpcap(str(pcap_path))
            signaling_data = {
                "total_packets": len(packets),
                "signaling_messages": [],
                "protocol_stats": {},
                "port_stats": {},
                "anomaly_patterns": []
            }

            # Analyze every packet.
            for i, pkt in enumerate(packets):
                if IP in pkt:
                    src_ip = pkt[IP].src
                    dst_ip = pkt[IP].dst
                    src_port = 0
                    dst_port = 0
                    protocol = "Unknown"
                    message_type = "Unknown"
                    # Reset per packet so headers can never leak from a
                    # previous iteration (the old code probed locals()).
                    sip_headers = None

                    if UDP in pkt:
                        src_port = pkt[UDP].sport
                        dst_port = pkt[UDP].dport
                        protocol = "UDP"
                        # SIP detection via payload keywords.
                        if Raw in pkt:
                            raw_data = pkt[Raw].load.decode('utf-8', errors='ignore')
                            if any(keyword in raw_data.upper() for keyword in ['SIP', 'INVITE', 'BYE', 'REGISTER', 'ACK', 'CANCEL']):
                                protocol = "SIP"
                                message_type = self._extract_sip_message_type(raw_data)
                                sip_headers = self._extract_sip_headers(raw_data)
                    elif TCP in pkt:
                        src_port = pkt[TCP].sport
                        dst_port = pkt[TCP].dport
                        protocol = "TCP"
                        # S1AP/NAS, GTP, or well-known S1 interface ports.
                        if Raw in pkt:
                            raw_data = pkt[Raw].load.decode('utf-8', errors='ignore')
                            if 'S1AP' in raw_data or 'NAS' in raw_data:
                                protocol = "S1AP/NAS"
                                message_type = self._extract_s1ap_message_type(raw_data)
                            elif 'GTP' in raw_data:
                                protocol = "GTP"
                                message_type = self._extract_gtp_message_type(raw_data)
                            elif src_port == 36412 or dst_port == 36412:
                                # 36412: S1-MME control-plane port.
                                protocol = "S1-MME"
                                message_type = "S1-MME_Control"
                            elif src_port == 2152 or dst_port == 2152:
                                # 2152: GTP-U user-plane port.
                                protocol = "S1-U"
                                message_type = "S1-U_User"

                    # Per-protocol packet counters.
                    signaling_data["protocol_stats"][protocol] = \
                        signaling_data["protocol_stats"].get(protocol, 0) + 1

                    # Per "src->dst" port-pair counters.
                    if src_port > 0:
                        port_key = f"{src_port}->{dst_port}"
                        signaling_data["port_stats"][port_key] = \
                            signaling_data["port_stats"].get(port_key, 0) + 1

                    # Record the signaling message.
                    msg_entry = {
                        "packet_no": i + 1,
                        "src_ip": src_ip,
                        "dst_ip": dst_ip,
                        "src_port": src_port,
                        "dst_port": dst_port,
                        "protocol": protocol,
                        "message_type": message_type,
                        "timestamp": float(pkt.time) if hasattr(pkt, 'time') and pkt.time is not None else None
                    }
                    if protocol == "SIP" and sip_headers:
                        msg_entry["sip_headers"] = sip_headers
                    signaling_data["signaling_messages"].append(msg_entry)

            # Extract caller/callee/SBC identity information from SIP headers.
            identities = self._extract_identities_from_sip(signaling_data["signaling_messages"])
            signaling_data["identities"] = identities

            # Detect anomaly patterns over the extracted messages.
            signaling_data["anomaly_patterns"] = self._detect_signaling_anomaly_patterns(signaling_data["signaling_messages"])

            return signaling_data

        except Exception as e:
            print(f"Error parsing pcap file {pcap_path}: {e}")
            return {
                "total_packets": 0,
                "signaling_messages": [],
                "protocol_stats": {},
                "port_stats": {},
                "anomaly_patterns": []
            }

    def _extract_sip_message_type(self, raw_data):
        """Classify a SIP payload by the first matching keyword.

        NOTE(review): plain substring matching — a response whose CSeq
        line carries the request method (e.g. "SIP/2.0 200 OK" with
        "CSeq: 1 INVITE") classifies as the method, not the status code.
        Kept as-is for compatibility with existing reports.
        """
        raw_upper = raw_data.upper()
        if 'INVITE' in raw_upper:
            return 'INVITE'
        elif 'BYE' in raw_upper:
            return 'BYE'
        elif 'REGISTER' in raw_upper:
            return 'REGISTER'
        elif 'ACK' in raw_upper:
            return 'ACK'
        elif 'CANCEL' in raw_upper:
            return 'CANCEL'
        elif '200 OK' in raw_upper:
            return '200 OK'
        elif '100 TRYING' in raw_upper:
            return '100 TRYING'
        elif '180 RINGING' in raw_upper:
            return '180 RINGING'
        else:
            return 'SIP_OTHER'

    def _extract_sip_headers(self, raw_data: str):
        """Extract key SIP headers: From, To, P-Asserted-Identity, Via,
        Record-Route, Call-ID, Contact, Server.

        Only the first Via / Record-Route occurrence is kept. Best
        effort: never raises, returns whatever was parsed so far.
        """
        headers = {}
        try:
            lines = raw_data.split('\n')
            for line in lines:
                l = line.strip()
                if l.lower().startswith('from:'):
                    headers['From'] = l[5:].strip()
                elif l.lower().startswith('to:'):
                    headers['To'] = l[3:].strip()
                elif l.lower().startswith('p-asserted-identity:'):
                    headers['PAI'] = l.split(':', 1)[1].strip()
                elif l.lower().startswith('via:') and 'Via' not in headers:
                    headers['Via'] = l[4:].strip()
                elif l.lower().startswith('record-route:') and 'Record-Route' not in headers:
                    headers['Record-Route'] = l.split(':', 1)[1].strip()
                elif l.lower().startswith('call-id:'):
                    headers['Call-ID'] = l.split(':', 1)[1].strip()
                elif l.lower().startswith('contact:'):
                    headers['Contact'] = l.split(':', 1)[1].strip()
                elif l.lower().startswith('server:'):
                    headers['Server'] = l.split(':', 1)[1].strip()
        except Exception:
            # Malformed payloads are tolerated; partial headers returned.
            pass
        return headers

    def _extract_identities_from_sip(self, signaling_messages):
        """Derive caller, callee, and SBC name/type/province from SIP headers."""
        caller = None
        callee = None
        sbc_name = None
        sbc_type = 'SBC'
        province = None

        def parse_number(val: str):
            # First run of 5+ digits, optionally prefixed with '+'.
            # (re is imported at module level; the old shadowing local
            # import was removed.)
            if not val:
                return None
            m = re.search(r'(\+?\d{5,})', val)
            return m.group(1) if m else None

        def parse_sbc_name(hdr: str):
            if not hdr:
                return None
            # Prefer the host/device-name fragment before any ';' parameters.
            return hdr.split(';')[0].strip()

        for msg in signaling_messages:
            if msg.get('protocol') == 'SIP' and msg.get('sip_headers'):
                h = msg['sip_headers']
                if not caller:
                    caller = parse_number(h.get('PAI') or h.get('From'))
                if not callee:
                    callee = parse_number(h.get('To'))
                if not sbc_name:
                    sbc_name = parse_sbc_name(h.get('Server') or h.get('Via') or h.get('Record-Route') or h.get('Contact'))
                if caller and callee and sbc_name:
                    # Everything found; stop scanning early.
                    break

        # Province: derived from a province abbreviation embedded in the
        # network-element name (currently only ZJ -> 浙江 is recognized).
        if sbc_name and 'ZJ' in sbc_name.upper():
            province = '浙江'

        identities = {
            'caller': caller or '',
            'callee': callee or '',
            'sbc_name': sbc_name or '',
            'sbc_type': sbc_type,
            'province': province or ''
        }
        return identities

    def _extract_s1ap_message_type(self, raw_data):
        """Classify an S1AP payload by the first matching keyword."""
        raw_upper = raw_data.upper()
        if 'INITIAL UE MESSAGE' in raw_upper:
            return 'INITIAL_UE_MESSAGE'
        elif 'DOWNLINK NAS TRANSPORT' in raw_upper:
            return 'DOWNLINK_NAS_TRANSPORT'
        elif 'UPLINK NAS TRANSPORT' in raw_upper:
            return 'UPLINK_NAS_TRANSPORT'
        elif 'TAU' in raw_upper:
            return 'TAU_REQUEST'
        elif 'HANDOVER' in raw_upper:
            return 'HANDOVER_REQUEST'
        else:
            return 'S1AP_OTHER'

    def _extract_gtp_message_type(self, raw_data):
        """Classify a GTP payload by the first matching keyword.

        Any failure (e.g. non-string input) maps to 'GTP_OTHER'.
        """
        try:
            raw_upper = raw_data.upper()
            # GTP message type recognition.
            if 'CREATE SESSION REQUEST' in raw_upper:
                return 'CreateSessionRequest'
            elif 'CREATE SESSION RESPONSE' in raw_upper:
                return 'CreateSessionResponse'
            elif 'MODIFY BEARER REQUEST' in raw_upper:
                return 'ModifyBearerRequest'
            elif 'MODIFY BEARER RESPONSE' in raw_upper:
                return 'ModifyBearerResponse'
            elif 'DELETE SESSION REQUEST' in raw_upper:
                return 'DeleteSessionRequest'
            elif 'DELETE SESSION RESPONSE' in raw_upper:
                return 'DeleteSessionResponse'
            elif 'ECHO REQUEST' in raw_upper:
                return 'EchoRequest'
            elif 'ECHO RESPONSE' in raw_upper:
                return 'EchoResponse'
            elif 'GTP-U' in raw_upper:
                return 'GTP-U_Data'
            else:
                return 'GTP_OTHER'
        except Exception:
            # Narrowed from a bare except: still treats any failure as
            # an unclassified GTP message, but no longer masks
            # KeyboardInterrupt/SystemExit.
            return 'GTP_OTHER'

    def _detect_signaling_anomaly_patterns(self, signaling_messages):
        """Detect anomaly patterns from classified signaling messages.

        Each of BYE / REGISTER / TAU_REQUEST / HANDOVER_REQUEST message
        types, when present, contributes one HIGH-severity pattern entry
        with its occurrence count.
        """
        anomaly_patterns = []

        # BYE messages: possible call-release anomaly.
        bye_messages = [msg for msg in signaling_messages if msg.get('message_type') == 'BYE']
        if len(bye_messages) > 0:
            anomaly_patterns.append({
                "type": "BYE_MESSAGE_FAILURE",
                "count": len(bye_messages),
                "description": f"检测到{len(bye_messages)}个BYE消息，可能存在呼叫释放异常",
                "severity": "HIGH"
            })

        # REGISTER messages: possible registration anomaly.
        register_messages = [msg for msg in signaling_messages if msg.get('message_type') == 'REGISTER']
        if len(register_messages) > 0:
            anomaly_patterns.append({
                "type": "REGISTRATION_FAILURE",
                "count": len(register_messages),
                "description": f"检测到{len(register_messages)}个注册消息，可能存在注册异常",
                "severity": "HIGH"
            })

        # TAU requests: possible tracking-area-update anomaly.
        tau_messages = [msg for msg in signaling_messages if msg.get('message_type') == 'TAU_REQUEST']
        if len(tau_messages) > 0:
            anomaly_patterns.append({
                "type": "TAU_FAILURE",
                "count": len(tau_messages),
                "description": f"检测到{len(tau_messages)}个TAU请求，可能存在跟踪区域更新异常",
                "severity": "HIGH"
            })

        # Handover requests: possible handover anomaly.
        handover_messages = [msg for msg in signaling_messages if msg.get('message_type') == 'HANDOVER_REQUEST']
        if len(handover_messages) > 0:
            anomaly_patterns.append({
                "type": "HANDOVER_FAILURE",
                "count": len(handover_messages),
                "description": f"检测到{len(handover_messages)}个切换请求，可能存在切换异常",
                "severity": "HIGH"
            })

        return anomaly_patterns

    def _extract_signaling_flow_from_filename(self, filename):
        """Classify the signaling flow from keywords in the filename.

        Recognizes TAU-reject, handover-failure and call-drop captures
        (Chinese keywords are part of the expected file-naming scheme);
        returns None for anything else.
        """
        filename_lower = filename.lower()

        if "tau" in filename_lower and "拒绝" in filename:
            return {
                "flow_type": "TAU",
                "description": "跟踪区域更新流程",
                "protocols": ["NAS", "S1AP"],
                "status": "failed",
                "source_file": filename,
                "stages": [
                    {"stage": "TAU请求", "status": "completed"},
                    {"stage": "核心网处理", "status": "failed"},
                    {"stage": "TAU响应", "status": "failed"}
                ],
                "packet_count": 2,
                "anomaly_type": "TAU_REGISTRATION_REJECT"
            }
        elif "切换" in filename and "失败" in filename:
            return {
                "flow_type": "Handover",
                "description": "切换流程",
                "protocols": ["X2AP", "S1AP"],
                "status": "failed",
                "source_file": filename,
                "stages": [
                    {"stage": "切换准备", "status": "failed"},
                    {"stage": "切换执行", "status": "pending"},
                    {"stage": "切换完成", "status": "pending"}
                ],
                "packet_count": 1,
                "anomaly_type": "HANDOVER_PREPARATION_FAILURE"
            }
        elif "掉话" in filename or "通话" in filename:
            return {
                "flow_type": "Call",
                "description": "通话流程",
                "protocols": ["SIP", "RTP"],
                "status": "failed",
                "source_file": filename,
                "stages": [
                    {"stage": "呼叫建立", "status": "completed"},
                    {"stage": "通话进行", "status": "failed"},
                    {"stage": "呼叫释放", "status": "completed"}
                ],
                "packet_count": 564,
                "anomaly_type": "CALL_DROP"
            }

        return None

    def _detect_control_anomalies(self, flow_data, filename):
        """Build anomaly records for one flow.

        Combines pattern-based anomalies (from parsed signaling) with the
        filename-derived anomaly classification.
        """
        anomalies = []

        # Anomalies derived from the parsed signaling patterns.
        if "anomaly_patterns" in flow_data:
            for pattern in flow_data["anomaly_patterns"]:
                anomaly = {
                    "type": pattern["type"],
                    "severity": pattern["severity"],
                    "description": pattern["description"],
                    "details": self._get_anomaly_details(pattern["type"], pattern["count"]),
                    "recommendations": self._get_anomaly_recommendations(pattern["type"]),
                    "ziyan_analysis": self._get_ziyan_analysis(pattern["type"]),
                    "source_file": filename,
                    "count": pattern["count"]
                }
                anomalies.append(anomaly)

        # Anomalies derived from the filename classification. All three
        # branches now use .get(): the original indexed
        # flow_data["anomaly_type"] in the elif branches and raised
        # KeyError when the key was absent.
        if flow_data.get("anomaly_type") == "TAU_REGISTRATION_REJECT":
            anomalies.append({
                "type": "TAU_REGISTRATION_REJECT",
                "severity": "HIGH",
                "description": "TAU跟踪区域更新被核心网拒绝",
                "details": "UE发送TAU请求后，核心网返回拒绝响应。可能原因：网络拥塞、用户认证失败、服务区域限制、核心网策略配置问题等。",
                "recommendations": [
                    "检查网络拥塞状况和负载均衡",
                    "验证用户认证信息和权限",
                    "检查服务区域配置和策略",
                    "优化核心网参数设置",
                    "检查AMF和SMF配置"
                ],
                "ziyan_analysis": "TAU拒绝通常与网络策略配置、用户权限验证、网络拥塞或核心网设备故障相关。建议检查AMF的注册策略和用户管理配置。",
                "source_file": filename,
                "flow_id": flow_data.get("flow_id", "unknown")
            })

        elif flow_data.get("anomaly_type") == "HANDOVER_PREPARATION_FAILURE":
            anomalies.append({
                "type": "HANDOVER_PREPARATION_FAILURE",
                "severity": "HIGH",
                "description": "切换准备阶段失败",
                "details": "UE在切换准备阶段遇到问题，无法完成小区间切换。可能原因：目标小区不可用、切换参数配置错误、网络资源不足、X2接口问题等。",
                "recommendations": [
                    "检查目标小区状态和覆盖",
                    "验证切换参数配置",
                    "优化网络资源分配",
                    "检查切换算法设置",
                    "检查X2接口连接状态"
                ],
                "ziyan_analysis": "自研大模型分析：切换失败通常与小区间协调、资源分配、参数配置或X2接口状态相关。建议检查邻区关系和切换门限设置。",
                "source_file": filename,
                "flow_id": flow_data.get("flow_id", "unknown")
            })

        elif flow_data.get("anomaly_type") == "CALL_DROP":
            anomalies.append({
                "type": "CALL_DROP",
                "severity": "CRITICAL",
                "description": "通话掉话异常",
                "details": "正在进行的通话意外中断，严重影响用户体验。可能原因：信号质量差、网络拥塞、设备故障、切换失败、核心网问题等。",
                "recommendations": [
                    "检查信号覆盖质量和RSRP/RSRQ",
                    "优化网络拥塞控制策略",
                    "验证设备运行状态",
                    "改进切换策略和参数",
                    "检查核心网会话管理"
                ],
                "ziyan_analysis": "自研大模型分析：掉话通常与信号质量、网络稳定性、切换策略或核心网会话管理相关。建议重点检查信号覆盖和切换成功率。",
                "source_file": filename,
                "flow_id": flow_data.get("flow_id", "unknown")
            })

        return anomalies

    def _get_anomaly_details(self, anomaly_type, count):
        """Return the detail text for a pattern-based anomaly type."""
        details_map = {
            "BYE_MESSAGE_FAILURE": f"检测到{count}个BYE消息失败，表明呼叫释放过程异常。可能原因：网络拥塞、信令超时、网元故障等。",
            "REGISTRATION_FAILURE": f"检测到{count}个注册失败，表明用户注册过程异常。可能原因：认证失败、网络策略限制、AMF故障等。",
            "TAU_FAILURE": f"检测到{count}个TAU请求失败，表明跟踪区域更新异常。可能原因：核心网拒绝、网络拥塞、用户权限问题等。",
            "HANDOVER_FAILURE": f"检测到{count}个切换失败，表明小区间切换异常。可能原因：目标小区不可用、切换参数错误、X2接口问题等。"
        }
        return details_map.get(anomaly_type, f"检测到{count}个{anomaly_type}异常")

    def _get_anomaly_recommendations(self, anomaly_type):
        """Return the recommendation list for a pattern-based anomaly type."""
        recommendations_map = {
            "BYE_MESSAGE_FAILURE": [
                "检查网络拥塞状况",
                "验证信令超时配置",
                "检查相关网元状态",
                "分析信令流程完整性"
            ],
            "REGISTRATION_FAILURE": [
                "检查用户认证信息",
                "验证AMF配置",
                "检查网络策略设置",
                "分析注册流程日志"
            ],
            "TAU_FAILURE": [
                "检查核心网策略配置",
                "验证用户权限设置",
                "分析网络拥塞状况",
                "检查AMF和SMF状态"
            ],
            "HANDOVER_FAILURE": [
                "检查目标小区状态",
                "验证切换参数配置",
                "检查X2接口连接",
                "分析切换算法设置"
            ]
        }
        return recommendations_map.get(anomaly_type, ["检查相关配置", "分析日志信息", "联系技术支持"])

    def _get_ziyan_analysis(self, anomaly_type):
        """Return the canned external-analysis conclusion for an anomaly type."""
        analysis_map = {
            "BYE_MESSAGE_FAILURE": "BYE消息失败通常与呼叫释放流程异常相关，可能由网络拥塞、信令超时或网元故障引起。建议检查SBC和CSCF的状态。",
            "REGISTRATION_FAILURE": "注册失败通常与用户认证、网络策略或AMF配置相关。建议检查用户权限和AMF的注册策略设置。",
            "TAU_FAILURE": "TAU失败通常与核心网策略配置、用户权限验证或网络拥塞相关。建议检查AMF的跟踪区域管理配置。",
            "HANDOVER_FAILURE": "切换失败通常与小区间协调、资源分配或X2接口状态相关。建议检查邻区关系和切换门限设置。"
        }
        return analysis_map.get(anomaly_type, "需要进一步分析具体原因")

    def _load_ziyan_analysis(self, ziyan_analysis_path):
        """Load the external large-model analysis result.

        Expects <path>/data/analysisResult.js containing a
        'const analysisResult = {...};' assignment; the JSON object is
        extracted from between the assignment and the first ';'.
        Returns the parsed object, or None on any failure.
        """
        try:
            analysis_file = Path(ziyan_analysis_path) / "data" / "analysisResult.js"
            if analysis_file.exists():
                with open(analysis_file, 'r', encoding='utf-8') as f:
                    content = f.read()
                    if 'const analysisResult = ' in content:
                        json_start = content.find('const analysisResult = ') + len('const analysisResult = ')
                        json_end = content.find(';', json_start)
                        json_str = content[json_start:json_end]
                        return json.loads(json_str)
            # Also reached when the file exists but lacks the expected
            # marker; kept for compatibility with existing log parsing.
            print(f"Ziyan analysis file not found: {analysis_file}")
            return None
        except Exception as e:
            print(f"Error loading Ziyan analysis: {e}")
            return None

    def _generate_merged_analysis(self, pcap_files):
        """Assemble the merged, report-ready analysis dict."""
        # Aggregate flow statistics.
        total_packets = sum(flow.get("packet_count", 0) for flow in self.signaling_flows.values())
        successful_flows = [f for f in self.signaling_flows.values() if f.get("status") == "success"]
        failed_flows = [f for f in self.signaling_flows.values() if f.get("status") == "failed"]

        # Anomalies by type.
        tau_anomalies = [a for a in self.anomalies if a.get("type") == "TAU_REGISTRATION_REJECT"]
        handover_anomalies = [a for a in self.anomalies if a.get("type") == "HANDOVER_PREPARATION_FAILURE"]
        call_anomalies = [a for a in self.anomalies if a.get("type") == "CALL_DROP"]

        # Anomalies by severity.
        high_severity_anomalies = [a for a in self.anomalies if a.get("severity") == "HIGH"]
        critical_anomalies = [a for a in self.anomalies if a.get("severity") == "CRITICAL"]

        # Infer network-element roles from the parsed signaling messages.
        node_roles = self._infer_node_roles()

        merged_analysis = {
            "metadata": {
                "analysis_timestamp": datetime.now().isoformat(),
                "analyzer_version": "2.0",
                "plane_type": "control",
                "file_count": len(pcap_files)
            },
            "summary": {
                "total_files": len(pcap_files),
                "total_packets": total_packets,
                "total_flows": len(self.signaling_flows),
                "successful_flows": len(successful_flows),
                "failed_flows": len(failed_flows),
                "total_anomalies": len(self.anomalies),
                "high_severity_anomalies": len(high_severity_anomalies),
                "critical_anomalies": len(critical_anomalies),
                "analysis_files": [
                    {
                        "file_name": Path(f).name,
                        "packets_count": self._file_packet_count(f, i),
                        "anomalies_count": len([a for a in self.anomalies if a.get("source_file") == Path(f).name])
                    }
                    for i, f in enumerate(pcap_files)
                ]
            },
            "control_plane_analysis": {
                "signaling_flows": list(self.signaling_flows.values()),
                "anomalies": self.anomalies,
                # Identities of the first flow (insertion order), empty dict
                # when no flows were recorded.
                "identities": self.signaling_flows.get(next(iter(self.signaling_flows), ''), {}).get("identities", {}),
                "statistics": {
                    "tau_flows": len([f for f in self.signaling_flows.values() if f.get("flow_type") == "TAU"]),
                    "handover_flows": len([f for f in self.signaling_flows.values() if f.get("flow_type") == "Handover"]),
                    "call_flows": len([f for f in self.signaling_flows.values() if f.get("flow_type") == "Call"]),
                    "tau_anomalies": len(tau_anomalies),
                    "handover_anomalies": len(handover_anomalies),
                    "call_anomalies": len(call_anomalies)
                },
                "node_roles": node_roles,
                "ziyan_integration": self.ziyan_analysis
            }
        }

        return merged_analysis

    def _file_packet_count(self, pcap_file, index):
        """Packet count attributed to one input file, looked up via the
        flow-id naming scheme used in _analyze_single_pcap.

        NOTE(review): flow ids are numbered globally across all flow
        kinds, so f"TAU_{index + 1}" only matches when file order and
        flow-creation order coincide — lookup kept identical to the
        original nested conditional for compatibility.
        """
        name = Path(pcap_file).name
        if "tau" in name.lower():
            return self.signaling_flows.get(f"TAU_{index + 1}", {}).get("packet_count", 0)
        if "切换" in name:
            return self.signaling_flows.get(f"Handover_{index + 1}", {}).get("packet_count", 0)
        return self.signaling_flows.get(f"Call_{index + 1}", {}).get("packet_count", 0)

    def _infer_node_roles(self):
        """Roughly map network-element roles to IPs from message IP/protocol features.

        Heuristic: the first TAU message's src/dst become UE/gNB; the
        remaining roles are assigned from the most frequently seen IPs.
        """
        ip_counts = defaultdict(int)
        tau_first_src = None
        tau_first_dst = None
        all_msgs = []
        for flow in self.signaling_flows.values():
            msgs = flow.get("signaling_messages", []) or []
            all_msgs.extend(msgs)
            for m in msgs:
                if m.get("src_ip"):
                    ip_counts[m["src_ip"]] += 1
                if m.get("dst_ip"):
                    ip_counts[m["dst_ip"]] += 1
            if flow.get("flow_type") == "TAU" and msgs:
                tau_first_src = tau_first_src or msgs[0].get("src_ip")
                tau_first_dst = tau_first_dst or msgs[0].get("dst_ip")

        # IPs ordered by descending occurrence count.
        ordered_ips = [ip for ip, _ in sorted(ip_counts.items(), key=lambda x: x[1], reverse=True)]

        # Seed the role map from the first TAU exchange.
        roles = {}
        if tau_first_src:
            roles["UE终端"] = tau_first_src
        if tau_first_dst:
            roles["5G基站"] = tau_first_dst

        # Pick the next unassigned IP (avoids duplicates across roles).
        def pick_next(exclude):
            for ip in ordered_ips:
                if ip not in exclude:
                    return ip
            return None

        used = set(roles.values())
        amf_ip = pick_next(used)
        if amf_ip:
            roles["AMF"] = amf_ip
            used.add(amf_ip)
        smf_ip = pick_next(used)
        if smf_ip:
            roles["SMF"] = smf_ip
            used.add(smf_ip)
        upf_ip = pick_next(used)
        if upf_ip:
            roles["UPF"] = upf_ip
            used.add(upf_ip)

        # Prefer a non-10.x IP for the data network; otherwise take the
        # next available one.
        data_net_ip = None
        for ip in ordered_ips:
            if ip not in used and not ip.startswith("10."):
                data_net_ip = ip
                break
        if not data_net_ip:
            data_net_ip = pick_next(used)
        if data_net_ip:
            roles["数据网络"] = data_net_ip

        return roles

    def _identify_s1_mme_flow(self, signaling_messages):
        """Recognize an S1-MME control-plane flow from protocol markers.

        Returns a flow dict when any S1-MME message is present
        ("completed" only with 2+ messages), else None.
        """
        s1_mme_messages = [msg for msg in signaling_messages
                          if msg.get('protocol') == 'S1-MME' or
                          msg.get('message_type') == 'S1-MME_Control']

        if not s1_mme_messages:
            return None

        return {
            "flow_type": "S1-MME",
            "description": "S1-MME控制面信令流程",
            "protocols": ["S1AP", "NAS"],
            "status": "completed" if len(s1_mme_messages) >= 2 else "failed",
            "source_file": "S1-MME_Interface",
            "stages": [
                {"stage": "初始UE消息", "status": "completed"},
                {"stage": "下行NAS传输", "status": "completed"},
                {"stage": "上行NAS传输", "status": "completed"},
                {"stage": "UE上下文释放", "status": "completed"}
            ],
            "packet_count": len(s1_mme_messages),
            "anomaly_type": None
        }

    def _identify_s1_u_flow(self, signaling_messages):
        """Recognize an S1-U user-plane flow from protocol markers.

        Returns a flow dict when any S1-U message is present, else None.
        """
        s1_u_messages = [msg for msg in signaling_messages
                        if msg.get('protocol') == 'S1-U' or
                        msg.get('message_type') == 'S1-U_User']

        if not s1_u_messages:
            return None

        return {
            "flow_type": "S1-U",
            "description": "S1-U用户面数据流程",
            "protocols": ["GTP-U"],
            "status": "completed" if len(s1_u_messages) >= 1 else "failed",
            "source_file": "S1-U_Interface",
            "stages": [
                {"stage": "用户面数据建立", "status": "completed"},
                {"stage": "数据转发", "status": "completed"},
                {"stage": "承载释放", "status": "completed"}
            ],
            "packet_count": len(s1_u_messages),
            "anomaly_type": None
        }

def main():
    """CLI entry point: parse arguments, run the analyzer, exit with status 0/1."""
    cli = argparse.ArgumentParser(description="Control Plane Analyzer")
    cli.add_argument("pcap_files", nargs="+", help="Control plane pcap files")
    cli.add_argument("output_dir", help="Output directory for analysis results")
    cli.add_argument("--ziyan-analysis", help="Path to Ziyan analysis data")
    opts = cli.parse_args()

    # Run the full analysis pipeline; the return value signals success.
    ok = ControlPlaneAnalyzer().analyze_pcap_files(
        opts.pcap_files,
        opts.output_dir,
        opts.ziyan_analysis,
    )

    if not ok:
        print("Control plane analysis failed")
        sys.exit(1)
    print("Control plane analysis completed successfully")
    sys.exit(0)

if __name__ == "__main__":
    # Run the CLI entry point only when executed as a script,
    # not when this module is imported.
    main()