import io
import re
import traceback
from concurrent.futures import ThreadPoolExecutor

import numpy as np
from scapy.all import *
from scapy.layers.inet import TCP
from xbase_util.common_util import process_origin_pos, get_packets_percentage, get_statistic_fields, \
    get_res_status_code_list, get_dns_domain, get_dns_domain_suffix, get_cookie_end_with_semicolon_count, \
    get_ua_duplicate_count, filter_visible_chars
from xbase_util.packet_util import get_detail_by_package
from xbase_util.pcap_util import process_session_id_disk_simple, reassemble_session_pcap, reassemble_tcp_pcap
from xbase_util.xbase_constant import src_dst_header, statisticHeader


class PacketUtil:
    """Static helpers that turn raw pcap sessions into flat, ES-ready
    session dicts: HTTP request/response pairing, timing statistics and
    DNS field extraction."""

    # Numeric statistic fields are initialised to -1 ("not computed");
    # everything else defaults to the empty string.
    _NUMERIC_SUFFIXES = ("_var", "_mean", "_count", "_average", "_rate",
                         "_min", "_max", "_length", "_percentage")

    @staticmethod
    def get_typed(all_packages, publicField):
        """Build one detail record per reassembled HTTP exchange.

        enable_abnormal_field is disabled so save_to_es_thread does not
        produce new fields that cannot be stored in ES.
        Returns [] on any failure (best-effort, errors are logged).
        """
        try:
            return [
                get_detail_by_package(publicField, package['req_header'], '', package['res_header'],
                                      '', enable_abnormal_field=False) for package in all_packages]
        except Exception as e:
            traceback.print_exc()
            print(f"get_typed:{e}")
            return []

    @staticmethod
    def get_all_packets(packets):
        """Pair HTTP requests with their responses from a packet list.

        Returns (pairs, all_requests, all_responses) where each pair is a
        dict with 'request'/'response' payload text and the corresponding
        scapy packets. Non-HTTP TCP payloads reset the pairing state.
        """
        http_request_pattern = re.compile(r'^(GET|POST|HEAD|PUT|DELETE|OPTIONS|PATCH) ')
        http_response_pattern = re.compile(r'^HTTP/')
        last_is_request = None
        current = {"request": "", "response": ""}
        rs_packets = []
        all_req = []
        all_res = []
        for packet_item in packets:
            if TCP not in packet_item or Raw not in packet_item:
                continue
            raw_data = packet_item[Raw].load.decode(errors='ignore')
            if http_request_pattern.match(raw_data):
                all_req.append(packet_item)
                if last_is_request:
                    # Two requests in a row: flush the unanswered one and
                    # start a FRESH dict (mutating the appended dict would
                    # corrupt already-collected pairs).
                    rs_packets.append(current)
                    current = {"request": "", "response": ""}
                current['request'] = raw_data
                current['request_packet'] = packet_item
                last_is_request = True
            elif http_response_pattern.match(raw_data):
                all_res.append(packet_item)
                current['response'] = raw_data
                current['response_packet'] = packet_item
                rs_packets.append(current)
                # Fresh dict so the next exchange never aliases this one,
                # and the final flush below cannot double-append it.
                current = {"request": "", "response": ""}
                last_is_request = False
            else:
                # Non-HTTP payload breaks the current pairing streak.
                last_is_request = None
        if current['request'] != "" or current["response"] != "":
            # Flush a trailing request that never got a response.
            rs_packets.append(current)
        return rs_packets, all_req, all_res

    @staticmethod
    def calculate_average_time_interval(packets):
        """Mean inter-arrival time of *packets* (objects with .time),
        rounded to 5 decimals; 0 for fewer than two packets."""
        if len(packets) < 2:
            return 0
        intervals = [(packets[i].time - packets[i - 1].time) for i in range(1, len(packets))]
        average_interval = sum(intervals) / len(intervals)
        return round(average_interval, 5) if average_interval != 0 else 0

    @staticmethod
    def calculate_variance(packets, average_interval):
        """Variance of the inter-arrival times around *average_interval*,
        rounded to 5 decimals; 0 for fewer than two packets."""
        if len(packets) < 2:
            return 0
        intervals = [(packets[i].time - packets[i - 1].time) for i in range(1, len(packets))]
        variance = sum((interval - average_interval) ** 2 for interval in intervals) / len(intervals)
        return round(variance, 5) if variance != 0 else 0

    @staticmethod
    def _diffs(times):
        """Consecutive differences of *times* (empty for < 2 elements)."""
        return [times[i + 1] - times[i] for i in range(len(times) - 1)]

    @staticmethod
    def _round_mean(values):
        """nanmean rounded to 5 decimals; 0 for empty or all-NaN input
        (the original `... or 0` never fired because NaN is truthy)."""
        if len(values) == 0:
            return 0
        m = np.nanmean(values)
        return 0 if np.isnan(m) else (round(float(m), 5) or 0)

    @staticmethod
    def _round_var(values):
        """nanvar rounded to 5 decimals; 0 for empty or all-NaN input."""
        if len(values) == 0:
            return 0
        v = np.nanvar(values)
        return 0 if np.isnan(v) else (round(float(v), 5) or 0)

    @staticmethod
    def _fill_http_statistics(session, all_packets, skey):
        """Compute per-session HTTP statistics into *session* in place."""
        # skey identifies the client side; matching key == request direction.
        all_req_size = [item['req_size'] for item in all_packets if item['key'] == skey]
        all_res_size = [item['res_size'] for item in all_packets if item['key'] != skey]
        num_1, num_2, num_3, num_4, num_5 = get_res_status_code_list(all_packets)
        # Header count approximated by counting ':' separators.
        req_header_count_list = [req['req_header'].count(":") for req in all_packets]
        req_diffs = PacketUtil._diffs([item['req_time'] for item in all_packets])
        res_diffs = PacketUtil._diffs([item['res_time'] for item in all_packets])
        # Request->response latency; 0 timestamps mean "missing", skip them.
        time_period = [abs(item['res_time'] - item['req_time']) for item in all_packets
                       if item['res_time'] != 0 and item['req_time'] != 0]
        session.update({
            "all_req_packet_size_mean": PacketUtil._round_mean(all_req_size),
            "all_req_packet_size_var": PacketUtil._round_var(all_req_size),
            "all_req_packet_time_period_mean": PacketUtil._round_mean(req_diffs),
            "all_req_packet_time_period_var": PacketUtil._round_var(req_diffs),
            "all_res_packet_size_mean": PacketUtil._round_mean(all_res_size),
            "all_res_packet_size_var": PacketUtil._round_var(all_res_size),
            "all_res_packet_time_period_mean": PacketUtil._round_mean(res_diffs),
            "all_res_packet_time_period_var": PacketUtil._round_var(res_diffs),
            "req_header_count_mean": PacketUtil._round_mean(req_header_count_list),
            "req_header_count_var": PacketUtil._round_var(req_header_count_list),
            "req_res_period_mean": PacketUtil._round_mean(time_period),
            "req_res_period_var": PacketUtil._round_var(time_period),
            "status_code_1x_count": num_1,
            "status_code_2x_count": num_2,
            "status_code_3x_count": num_3,
            "status_code_4x_count": num_4,
            "status_code_5x_count": num_5,
            "cookie_end_with_semicolon_count": get_cookie_end_with_semicolon_count(all_packets),
            "ua_duplicate_count": get_ua_duplicate_count(all_packets),
        })

    @staticmethod
    def _fill_dns_fields(session, packets_scapy):
        """Extract DNS domain fields from *packets_scapy* into *session*."""
        dns_domain = get_dns_domain(packets_scapy)
        dns_domain_length = len(dns_domain)
        dns_domain_suffix = get_dns_domain_suffix(dns_domain)
        dns_base_domain = dns_domain.replace(f".{dns_domain_suffix}", "")
        session.update({
            "dns_base_domain": dns_base_domain,
            "dns_base_domain_length": len(dns_base_domain),
            "dns_domain_suffix": dns_domain_suffix,
            "dns_domain_suffix_length": len(dns_domain_suffix),
            "dns_domain": dns_domain,
            "dns_domain_length": dns_domain_length,
            # mean/var of a single scalar: the value itself and 0.
            "dns_domain_length_mean": round(np.mean(dns_domain_length), 5),
            "dns_domain_length_var": round(np.var(dns_domain_length), 5)})

    @staticmethod
    def thread_in_process(path_prefix, session, esdb):
        """Load one session's pcap from disk and enrich *session*.

        Returns (sessions, stream): a list of session dicts ready for ES
        and the raw pcap bytes (None when the pcap could not be read).
        """
        protocol = session['protocol']
        for h in src_dst_header + statisticHeader:
            session[h] = -1 if h.endswith(PacketUtil._NUMERIC_SUFFIXES) else ""

        originPos = session['packetPos']
        pos = process_origin_pos(originPos)
        session.update({
            "dns_base_domain": "",
            "dns_base_domain_length": -1,
            "dns_domain_suffix": "",
            "dns_domain_suffix_length": -1,
            "dns_domain": "",
            "dns_domain_length": -1,
            "dns_domain_length_mean": -1,
            "dns_domain_length_var": -1,
            "req_res_period_mean": -1,
            "req_res_period_var": -1,
            "status_code_1x_count": -1,
            "status_code_2x_count": -1,
            "status_code_3x_count": -1,
            "status_code_4x_count": -1,
            "status_code_5x_count": -1,
            "req_bytes_percentage": get_packets_percentage(session, True),
            "res_bytes_percentage": get_packets_percentage(session, False),
            "cookie_end_with_semicolon_count": -1,
            "ua_duplicate_count": -1,
            "pcap_flow_text": ""
        })
        if len(pos) == 0:
            return [PacketUtil.edit_session_and_save(session=session)], None
        try:
            if not isinstance(pos, list):
                print(f"不是list :{pos}  type:{type(pos)}")
            stream, obj = process_session_id_disk_simple(id=session['id'], node=session['node'],
                                                         packet_pos=pos, esdb=esdb,
                                                         pcap_path_prefix=path_prefix)
            if stream is None:
                return [PacketUtil.edit_session_and_save(session=session)], None
            packets_scapy = rdpcap(io.BytesIO(stream))
            if packets_scapy is None or len(packets_scapy) == 0:
                return [PacketUtil.edit_session_and_save(session=session)], None
            # Concatenated TCP payloads -> printable flow text for ES.
            payload = b"".join(pkt[Raw].load for pkt in packets_scapy if Raw in pkt)
            session.update({"pcap_flow_text": filter_visible_chars(payload)})
            if "tcp" in protocol and "http" in protocol:
                session.update(get_statistic_fields(packets_scapy))
                skey = f"{session['source.ip']}:{session['source.port']}"
                all_packets = reassemble_session_pcap(reassemble_tcp_pcap(packets_scapy), skey,
                                                      session_id=session['id'])
                if len(all_packets) != 0:
                    PacketUtil._fill_http_statistics(session, all_packets, skey)
                    return PacketUtil.get_typed(all_packets, publicField=session), stream
            elif "dns" in protocol:
                PacketUtil._fill_dns_fields(session, packets_scapy)
            # Fix: previously the empty-reassembly HTTP case fell through and
            # returned None instead of the stream that was already loaded.
            return [PacketUtil.edit_session_and_save(session=session)], stream
        except Exception:
            print("获取pcap错误")
            traceback.print_exc()
        return [PacketUtil.edit_session_and_save(session=session)], None

    @staticmethod
    def package_session_process(pcap_thread_in_process, per_process, path_prefix, esdb):
        """Process *per_process* sessions concurrently.

        Fans thread_in_process out over a thread pool (pcap reading is
        I/O-bound). Returns (session_list, stream_map) where stream_map
        maps each session id to its raw pcap bytes; ([], {}) on failure.
        """
        try:
            with ThreadPoolExecutor(max_workers=pcap_thread_in_process) as pool:
                futures = [pool.submit(PacketUtil.thread_in_process, path_prefix, session, esdb)
                           for session in per_process]
                session_list = []
                stream_map = {}
                for future in futures:
                    sessions, stream = future.result()
                    session_list.extend(sessions)
                    if len(sessions) > 0:
                        stream_map[sessions[0]['id']] = stream
                return session_list, stream_map
        except Exception:
            traceback.print_exc()
            return [], {}

    @staticmethod
    def edit_session_and_save(session):
        """Ensure numeric defaults exist on *session* and return it."""
        session.setdefault("initRTT", 0)
        session.setdefault("length", 0)
        return session
