import json
import multiprocessing
from concurrent.futures import ThreadPoolExecutor
from multiprocessing import Pool

import numpy as np
import pandas as pd
from elasticsearch import Elasticsearch
from scapy.all import *
from scapy.layers.dns import DNS
from scapy.layers.inet import TCP
from base import db
from base.util.common_util import *
from base.constant import out_folder, otherHeader, statisticHeader, sessionKeyname
from base.util.file_util import check_path
from base.util.log_util import log_e
from base.pcap import Pcap
from base.util.pcap_util import process_session_id_disk_simple, fix_pos, process_file


def save_packets(origin_data, file_path, file_lock):
    """Append session rows to the CSV at *file_path* under *file_lock*.

    The header row is written only when the file is missing or empty; the
    'packetPos' column is always excluded from the output.

    Args:
        origin_data: list of session dicts (one CSV row each).
        file_path: destination CSV path.
        file_lock: inter-process lock guarding the shared file.
    """
    print(f"开始保存:{len(origin_data)}")
    with file_lock:
        import os
        write_header = not os.path.isfile(file_path) or os.path.getsize(file_path) == 0
        # Fixed column order; packetPos is internal and never exported.
        columns = [c for c in sessionKeyname + otherHeader + statisticHeader if c != 'packetPos']
        df = pd.DataFrame(origin_data).reindex(columns=columns, fill_value='')
        if write_header:
            df.to_csv(check_path(file_path), index=False, columns=columns)
        else:
            df.to_csv(check_path(file_path), mode='a', index=False, header=False)


def get_header_value(header_set, value):
    """Return the stripped value of the first line containing *value*, or ''.

    Lines are matched by substring; the "<value>:" prefix is removed from
    the matched line before stripping whitespace.
    """
    for line in header_set:
        if value in line:
            return line.replace(f"{value}:", "").strip()
    return ""


def get_detail_by_package(param, publicField):
    """Build one output record for a single HTTP request/response exchange.

    Args:
        param: dict with raw "request" and "response" text blocks.
        publicField: session-level fields shared by every exchange; copied so
            each returned record is independent (previously every record
            aliased the same dict and all ended up with the last exchange's
            values).

    Returns:
        dict: copy of publicField augmented with parsed http.* fields and any
        src_/dst_ header columns declared in otherHeader.
    """
    res_field = dict(publicField)  # fix: copy instead of aliasing the shared session dict
    res_field["initRTT"] = firstOrZero(res_field.get("initRTT", 0))
    res_field["length"] = firstOrZero(res_field.get("length", 0))
    request_lines = param["request"].strip().split("\n")
    http_request_lines = [item for item in request_lines if "HTTP" in item]
    res_field['http.clientVersion'] = ''
    res_field['http.path'] = ''
    res_field['http.method'] = ''
    if http_request_lines:
        first_line = http_request_lines[0].split(" ")
        if len(first_line) >= 3:  # guard: malformed request line would IndexError
            res_field['http.clientVersion'] = str(first_line[2]).replace("\n", "").replace("\r", "")
            res_field['http.path'] = first_line[1]
            res_field['http.method'] = first_line[0]
    res_field['http.request-referer'] = get_header_value(header_set=request_lines, value="Referer")
    res_field['http.useragent'] = get_header_value(header_set=request_lines, value="Agent")
    res_field['http.request-content-type'] = get_header_value(header_set=request_lines, value="Content-Type")
    res_field['http.hostTokens'] = get_header_value(header_set=request_lines, value="Host")
    response_lines = param["response"].strip().split("\n")
    http_response_lines = [item for item in response_lines if "HTTP" in item]
    res_field['http.statuscode'] = ""
    res_field['http.serverVersion'] = ""
    if http_response_lines:
        first_line = http_response_lines[0].strip().split(" ")
        if len(first_line) >= 2:  # expect e.g. "HTTP/1.1 200 OK"
            res_field['http.statuscode'] = first_line[1]
            version_parts = first_line[0].split("/")
            if len(version_parts) >= 2:  # guard: status line without "/" would IndexError
                res_field['http.serverVersion'] = version_parts[1]
    res_field['http.response-server'] = get_header_value(header_set=response_lines, value="Server")
    res_field['http.response-content-type'] = get_header_value(header_set=response_lines, value="Content-Type")
    # Map recognized "Key: value" lines onto src_/dst_ columns declared in otherHeader.
    for line in set(response_lines + request_lines):
        key_value = line.replace("\r", "").split(":")
        if len(key_value) == 2:
            key = key_value[0].replace(" ", "").replace("-", "_").lower()
            value = key_value[1].replace(" ", "")
            if f"src_{key}" in otherHeader:
                res_field[f"src_{key}"] = value
            if f"dst_{key}" in otherHeader:
                res_field[f"dst_{key}"] = value
    return res_field


def get_typed(all_packages, publicField):
    """Map each request/response pair to a detailed record.

    Returns a list of per-exchange dicts, or [] when any exchange fails
    (the failure is printed and logged, best-effort).
    """
    try:
        records = []
        for package in all_packages:
            records.append(get_detail_by_package(package, publicField))
        return records
    except Exception as e:
        traceback.print_exc()
        print(e)
        log_e(e)
        return []


def process_session_id_disk(node, es, packet_pos):
    """Reassemble a session's raw pcap byte stream from capture files on disk.

    Args:
        node: capture node name used to locate pcap files.
        es: Elasticsearch client used for file-index lookups.
        packet_pos: position list where a negative entry selects the current
            file number (its absolute value) and non-negative entries are
            packet positions inside that file.

    Returns:
        The pcap global header followed by the packet bytes assembled so far.
        Best-effort: unreadable files and errors are printed and skipped.
    """
    packetPos = packet_pos
    fix_pos(node, packetPos, es)
    has_handle_header = False
    file_num = 0
    item_pos = 0
    header = bytearray()

    # Staging buffer for packet bytes (0xfffe = 65534 bytes).
    byte_array = bytearray(0xfffe)
    next_packet = 0  # next in-order packet index expected when flushing
    b_offset = 0  # current write offset inside byte_array
    packets = {}  # packets buffered by sequence index until contiguous
    try:
        for pos_item in packetPos:
            # Negative entries are file-number markers, not packet offsets.
            if pos_item < 0:
                file_num = pos_item * -1
                continue
            o_pcap = Pcap.get(f"{node}:{file_num}")
            if o_pcap.is_corrupt():
                continue
            elif not o_pcap.is_open():
                file = db.Db.get_file_by_file_id(node=node, num=file_num, es=es)
                if not file:
                    print(
                        f"Only have SPI data, PCAP file no longer available. Couldn't look up {node}-{file_num} in files index")
                    continue
                try:
                    o_pcap = Pcap.get(f"{node}:{file_num}")
                    o_pcap.open(file)
                except Exception as err:
                    print(f"ERROR - Couldn't open file {err}")
                    continue
            if not has_handle_header:
                # Keep the first file's pcap global header for the final output.
                header = o_pcap.read_header()
                has_handle_header = True
            # NOTE(review): `id` here resolves to the builtin, not a session id —
            # confirm what process_file actually expects for this argument.
            pcap, packet, err_msg = process_file(o_pcap, pos_item, item_pos, len(packetPos), id)
            print(f"read:{len(packet)}")
            if err_msg:
                print(err_msg)
                continue
            if pcap and packet:
                packets[item_pos] = packet
                # Flush any contiguous run of buffered packets in order.
                while next_packet in packets:
                    buffer = packets[next_packet]
                    del packets[next_packet]
                    next_packet = next_packet + 1
                    if b_offset + len(buffer) > len(byte_array):
                        # NOTE(review): on overflow this replaces the pcap header
                        # with the flushed data and resets the buffer — earlier
                        # flushed bytes appear to be discarded; confirm intended.
                        header = byte_array[:b_offset]
                        b_offset = 0
                        byte_array = bytearray(0xfffe)
                    byte_array[b_offset:b_offset + len(buffer)] = buffer
                    b_offset += len(buffer)
                item_pos = item_pos + 1
    except Exception as e:
        print(e)
    return header + byte_array[:b_offset]


def get_statistic_fields(packets):
    """Compute per-session packet-length statistics.

    Produces overall size/rate stats plus count/average/min/max/rate/percent
    for each fixed length bucket. *packets* items must support len() and
    expose a .time attribute.
    """
    buckets = [
        ("0_19", 0, 19),
        ("20_39", 20, 39),
        ("40_79", 40, 79),
        ("80_159", 80, 159),
        ("160_319", 160, 319),
        ("320_639", 320, 639),
        ("640_1279", 640, 1279),
        ("1280_2559", 1280, 2559),
        ("2560_5119", 2560, 5119),
        ("more_than_5120", 5120, float('inf')),
    ]

    def bucket_of(size):
        # Every non-negative size falls in some bucket; keep the fallback anyway.
        for name, lo, hi in buckets:
            if lo <= size <= hi:
                return name
        return "more_than_5120"

    sizes = [len(p) for p in packets]
    total = len(sizes)
    grouped = {name: [] for name, _, _ in buckets}
    for size in sizes:
        grouped[bucket_of(size)].append(size)
    # Session duration; defaults to 1 when fewer than two packets exist.
    span = packets[-1].time - packets[0].time if total > 1 else 1
    stats = {
        "packet_size_mean": float(round(np.mean(sizes), 5)),
        "packet_size_variance": float(round(np.var(sizes), 5)),
        'packet_len_total_count': total,
        'packet_len_total_average': round(sum(sizes) / total, 5) if total > 0 else 0,
        'packet_len_total_min': min(sizes) if sizes else 0,
        'packet_len_total_max': max(sizes) if sizes else 0,
        'packet_len_total_rate': float(round((total / span) / 1000, 5) if span > 0 else 0),
        'packet_len_total_percent': 1
    }
    for name, members in grouped.items():
        count = len(members)
        stats[f"packet_len_{name}_count"] = count
        stats[f"packet_len_{name}_average"] = round(sum(members) / count, 5) if count else 0
        stats[f"packet_len_{name}_min"] = min(members) if count else 0
        stats[f"packet_len_{name}_max"] = max(members) if count else 0
        stats[f"packet_len_{name}_rate"] = float(round((count / span) / 1000, 5) if span > 0 else 0)
        stats[f"packet_len_{name}_percent"] = round(count / total, 5) if total > 0 else 0
    return stats


def get_all_packets(packets):
    """Pair HTTP request/response payloads found in a TCP packet stream.

    Args:
        packets: iterable of scapy packets.

    Returns:
        tuple: (pairs, all_req, all_res) where pairs is a list of
        {"request": str, "response": str} dicts in stream order, and
        all_req/all_res are the raw request/response packets.

    Fixes over the previous version: a response following another response
    was stored under the "request" key, and the working dict was appended
    repeatedly while still being mutated, so earlier pairs aliased the same
    object and were overwritten by later payloads.
    """
    http_request_pattern = re.compile(r'^(GET|POST|HEAD|PUT|DELETE|OPTIONS|PATCH) ')
    http_response_pattern = re.compile(r'^HTTP/')
    rs_packets = []
    all_req = []
    all_res = []
    current = {"request": "", "response": ""}
    for packet_item in packets:
        if TCP not in packet_item or Raw not in packet_item:
            continue
        raw_data = packet_item[Raw].load.decode(errors='ignore')
        if http_request_pattern.match(raw_data):
            all_req.append(packet_item)
            if current["request"] or current["response"]:
                # A new request starts a new exchange; flush the previous one.
                rs_packets.append(current)
            current = {"request": raw_data, "response": ""}
        elif http_response_pattern.match(raw_data):
            all_res.append(packet_item)
            # Complete the current exchange (request may be empty if the
            # response arrived without a captured request).
            current["response"] = raw_data
            rs_packets.append(current)
            current = {"request": "", "response": ""}
    if current["request"] or current["response"]:
        rs_packets.append(current)
    return rs_packets, all_req, all_res


def calculate_average_time_interval(packets):
    """Return the mean inter-packet arrival interval, rounded to 5 decimals.

    Returns 0 when fewer than two packets are given or the mean is zero.
    """
    if len(packets) < 2:
        return 0

    gaps = [later.time - earlier.time for earlier, later in zip(packets, packets[1:])]
    mean_gap = sum(gaps) / len(gaps)
    # Keep 5 decimal places for non-zero results
    return round(mean_gap, 5) if mean_gap != 0 else 0


def calculate_variance(packets, average_interval):
    """Return the population variance of inter-packet gaps around *average_interval*.

    Returns 0 when fewer than two packets are given or the variance is zero;
    non-zero results are rounded to 5 decimal places.
    """
    if len(packets) < 2:
        return 0

    gaps = [b.time - a.time for a, b in zip(packets, packets[1:])]

    # Mean squared deviation from the supplied average
    spread = sum((gap - average_interval) ** 2 for gap in gaps) / len(gaps)
    return round(spread, 5) if spread != 0 else 0


def get_dns_domain_length(packets):
    """Return the length of the first DNS query name found in *packets*, 0 if none."""
    for pkt in packets:
        if DNS not in pkt:
            continue
        dns_layer = pkt[DNS]
        if dns_layer.qd:
            domain_name = dns_layer.qd.qname.decode('utf-8')
            print(f"DNS 请求的域名: {domain_name}")
            return len(domain_name)
    return 0


def thread_in_process(config_yaml,
                      index_session, session, per_process,
                      start_time,
                      log):
    """Enrich one session dict and return the row(s) to persist.

    For tcp+http sessions, the raw packets are re-read from disk (via ES
    lookups), statistical and per-HTTP-exchange fields are added; all other
    sessions are passed through with normalized numeric fields.

    Returns:
        list[dict]: zero or more session rows for CSV export (empty on error).
    """
    results = []
    try:
        initial_time = datetime.now()
        protocol = session['protocol']
        # Only sessions that are both TCP and HTTP get packet-level analysis.
        need_read_tcp = "tcp" in protocol and "http" in protocol
        read_packet_log = ""
        # Add header columns (blank placeholders keep the CSV schema stable)
        for h in otherHeader + statisticHeader:
            session.update({h: ""})
        if need_read_tcp:
            http_auth = (config_yaml['es']['account'], config_yaml['es']['password']) if config_yaml['es'][
                'use_http_auth'] \
                else None
            es = Elasticsearch(config_yaml['es']['url'],
                               http_auth=http_auth,
                               verify_certs=False)
            pos = []
            # packetPos is stored as a JSON string; malformed values mean "no packets".
            if isinstance(session['packetPos'], str):
                try:
                    pos = json.loads(session['packetPos'])
                except Exception as e:
                    pos = []
            if len(pos) == 0:
                print("pos len is 0")
            else:
                try:
                    # Rebuild the session's pcap bytes from disk and parse with scapy.
                    packets = rdpcap(
                        io.BytesIO(process_session_id_disk_simple(id=session['id'], node=session['node'], es=es,
                                                                  packet_pos=pos)))
                    if packets is None or len(packets) == 0:
                        results.append(edit_session_and_save(session=session))
                    else:
                        session.update(get_statistic_fields(packets))
                        all_packages, all_req, all_res = get_all_packets(packets)
                        dns_domain_length = get_dns_domain_length(packets)
                        if len(all_packages) != 0:
                            all_req_count = [len(item) for item in all_req]
                            all_res_count = [len(item) for item in all_res]
                            time_mean_req = calculate_average_time_interval(all_req)
                            time_mean_res = calculate_average_time_interval(all_res)
                            reqs = [item['request'] for item in all_packages]
                            # Count well-formed "Key: value" header lines per request.
                            param_counts = []
                            for res in reqs:
                                l = []
                                for param in res.split("\n"):
                                    p = f"{param}".strip()
                                    if p != "" and len(p.strip().split(": ")) == 2:
                                        l.append(p)
                                param_counts.append(len(l))
                            # NOTE(review): dns_domain_length is a scalar, so mean() is the
                            # value itself and var() is always 0 — presumably a list of
                            # lengths was intended; confirm.
                            session.update({"dns_domain_length_mean": round(np.mean(dns_domain_length), 5),
                                            "dns_domain_length_var": round(np.var(dns_domain_length), 5),
                                            "all_req_packet_size_mean": round(np.mean(all_req_count), 5),
                                            "all_req_packet_size_var": round(np.var(all_req_count), 5),
                                            "all_req_packet_time_period_mean": time_mean_req,
                                            "all_req_packet_time_period_var": calculate_variance(all_req,
                                                                                                 time_mean_req),
                                            "all_res_packet_size_mean": round(np.mean(all_res_count), 5),
                                            "all_res_packet_size_var": round(np.var(all_res_count), 5),
                                            "all_res_packet_time_period_mean": time_mean_res,
                                            "all_res_packet_time_period_var": calculate_variance(all_res,
                                                                                                 time_mean_res),
                                            "req_header_count_mean": round(np.mean(param_counts), 5),
                                            "req_header_count_var": round(np.var(param_counts), 5)})
                            typed_session_list = get_typed(all_packages, publicField=session)
                            results.extend(typed_session_list)
                except Exception as e:
                    # Best-effort: on any packet-processing failure, fall back to
                    # the raw session row instead of dropping it.
                    results.append(edit_session_and_save(session=session))
        else:
            results.append(edit_session_and_save(session=session))
        print(
            f"{log}第{index_session + 1}/{len(per_process)}个session;本条session耗时：{datetime.now() - initial_time};总耗时：{str(datetime.now() - start_time)};{read_packet_log}")
    except Exception as e:
        print(f"错误:{e}")
        log_e(e)
    return results


def package_session_process(config_yaml, per_process, start_time, file_lock, log):
    """Process a batch of sessions with a thread pool and persist the rows.

    Each session in *per_process* is handled by thread_in_process; all
    resulting rows are collected and appended to the shared output CSV
    under *file_lock*.
    """
    output_path = f"{out_folder}{os.sep}{config_yaml['session']['time_data_out']}"
    warnings.filterwarnings('ignore')
    worker_count = config_yaml['task_package']['thread_in_process']
    with ThreadPoolExecutor(max_workers=worker_count) as executor:
        pending = [
            executor.submit(thread_in_process,
                            config_yaml,
                            idx, sess, per_process,
                            start_time, log)
            for idx, sess in enumerate(per_process)
        ]
        collected = []
        for task in pending:
            collected.extend(task.result())
        save_packets(origin_data=collected, file_path=output_path, file_lock=file_lock)
        print(f"已保存{len(collected)}")


def edit_session_and_save(session):
    """Normalize the numeric fields of a session in place and return it."""
    for field in ("initRTT", "length"):
        session[field] = firstOrZero(session.get(field, 0))
    return session


def get_package(config_yaml, sample):
    """Split *sample* into subsections and fan each out over a process pool.

    Each subsection is divided again per configured process count; every
    chunk runs package_session_process with a shared manager lock guarding
    the output file. Blocks until all subsections complete.
    """
    start_time = datetime.now()
    num_subsections, subsection_sizes = split_samples(sample, config_yaml['task_package']['per_subsection'])
    manager = multiprocessing.Manager()
    file_lock = manager.Lock()
    offset = 0
    for seg_idx, seg_size in enumerate(subsection_sizes):
        subsection = sample[offset:offset + seg_size]
        # Drop empty per-process shares before spawning workers.
        proc_sizes = [size for size in split_process(subsection, config_yaml['task_package']['process']) if size != 0]
        task_args = []
        inner_offset = 0
        for proc_idx, proc_size in enumerate(proc_sizes):
            chunk = subsection[inner_offset:inner_offset + proc_size]
            task_args.append((
                config_yaml,
                chunk,
                start_time,
                file_lock,
                f"第{seg_idx + 1}/{len(subsection_sizes)}个分段；第{proc_idx + 1}/{len(proc_sizes)}个进程;",
            ))
            inner_offset += proc_size
        pool = Pool(processes=len(proc_sizes))
        pending = [pool.apply_async(package_session_process, args) for args in task_args]
        for task in pending:
            task.get()
        pool.close()
        pool.join()
        offset += seg_size
    print("包分段处理完毕，记得启动 arkime capture")
