import collections
import csv
import json
import logging
import os
import time
import typing as t
from datetime import datetime

import numpy as np
from scapy.all import Packet, PcapReader, sniff, wrpcapng
from scapy.layers.inet6 import *

from app.config import config
from app.proto import FIELD_NAMES, HTTPPacketRecord, PacketRecord, TLSPacketRecord
from app.utils import utc_stamp_to_utc

from .flow import Flow
from .processor import parse_packet

LOGGER = logging.getLogger("FlowRecorder")


class FlowRecorder:
    """Ingest packets (live from an interface or from pcap files),
    aggregate them into flow records and export flow / url /
    destination-ip statistics to disk.
    """

    def __init__(self):
        # How often (in processed packets) progress is logged.
        self.info_frequency = config.info_frequency
        # How often (in captured packets) raw packets are flushed to disk.
        self.packet_save_freq = config.packet_save_frequency
        # Directory for raw-packet pcapng dumps.
        # NOTE(review): save() previously read `self.packet_dir_fullname`
        # which was never assigned (guaranteed AttributeError); the other
        # output paths all come from config, so presumably this one does
        # too — confirm the config field name.
        self.packet_dir_fullname = config.packet_dir_fullname

        # Record all raw packets:
        self.packets: t.List[Packet] = []
        # Packet records produced by parsing:
        self.packet_records: t.List[PacketRecord] = []

        # Mode is 'u' for unidirectional or 'b' for bidirectional:
        self.mode = config.mode

        # Dictionaries holding current and archived flow records.
        # collections.OrderedDict (not the deprecated typing alias) is kept
        # rather than a plain dict in case Flow relies on its extra API.
        self.flow_cache = collections.OrderedDict()
        self.flow_archive = collections.OrderedDict()
        self.url_dict = collections.OrderedDict()

        # Flow object that performs the per-packet flow updates:
        self.flow = Flow(self.flow_cache, self.flow_archive)

        # Counter for packets that we ignored for various reasons:
        self.packets_ignored = 0
        # Counter for all the processed packets:
        self.packets_processed = 0
        # Counter for all http/tls packets:
        self.packets_marked = 0

    def _ingest_packet(self, packet: Packet):
        """Ingest one packet: parse it, update the flow state and
        periodically flush the newest raw packets to disk.
        """
        self.packets.append(packet)

        # Keep the raw packet and the parsed record under distinct names
        # (the original shadowed `packet`).
        layer, record = parse_packet(packet)

        if record and record.ingested:
            if layer != "raw":
                # http/tls traffic we specifically track:
                self.packets_marked += 1
            # Update the flow with packet info:
            self.flow.update(record)
            self.packets_processed += 1
            self.packet_records.append(record)

            # Use the value cached in __init__ (was re-read from config).
            if self.packets_processed % self.info_frequency == 0:
                LOGGER.info("Already processed %d packets",
                            self.packets_processed)
        else:
            self.packets_ignored += 1

        if config.capture and len(self.packets) % self.packet_save_freq == 0:
            # Flush only the newest slice of raw packets.
            self.save(self.packets[-self.packet_save_freq:])

    def run_live(self):
        """Capture packets live from the configured interface until
        `config.numbers` packets are seen or the user interrupts.
        """
        try:
            LOGGER.info("Starting sniffing on interface %s", config.interface)
            sniff(iface=config.interface, prn=self._ingest_packet,
                  count=config.numbers)
        # BUG FIX: `(KeyboardInterrupt or SystemExit)` evaluated to
        # KeyboardInterrupt alone; a tuple is required to catch both.
        except (KeyboardInterrupt, SystemExit):
            LOGGER.info(
                "SIGINT (ctrl+c) detected. Quit capturing live packets.")

    #####################
    # Analyze Functions #
    #####################

    def analyze(self):
        """Export all collected results and log summary statistics."""
        self._save_flow_info(config.flow_info_fullname)
        self._save_url_info(config.url_info_fullname)
        self._save_dst_ip_info(config.dst_ip_info_fullname)
        self.stats()

    def _save_dst_ip_info(self, output_json: str):
        """Group per-destination-ip info (ports, urls, TLS server names)
        from the packet records and dump it as JSON to `output_json`.
        """
        organized_data: t.Dict[str, dict] = {}

        for record in self.packet_records:
            dst_ip = record.ip_dst
            entry = organized_data.setdefault(dst_ip, {
                "dst_ip": dst_ip,
                "location": record.region_dst,
                "ports": set(),
                "url": set(),
                "server_name": set(),
            })

            # Update port info (skip 0/None destination ports):
            if record.tp_dst:
                entry["ports"].add(record.tp_dst)

            # For HTTPPacketRecord, collect the URL without GET params:
            if isinstance(record, HTTPPacketRecord):
                entry["url"].add(record.url.split('?')[0])

            # For TLSPacketRecord, collect the server name:
            if isinstance(record, TLSPacketRecord):
                entry["server_name"].add(record.server_name)

        # JSON cannot serialise sets; convert them to lists:
        for data in organized_data.values():
            data["ports"] = list(data["ports"])
            data["url"] = list(data["url"])
            data["server_name"] = list(data["server_name"])

        # BUG FIX: the file handle from the inline open() was never
        # closed; use a context manager.
        with open(output_json, 'w', encoding='utf-8') as json_file:
            json.dump(organized_data, json_file, ensure_ascii=False)
        LOGGER.info("Destination ip info saved to %s", output_json)

    def _save_url_info(self, output_csv: str):
        """Write the url/servername info of HTTP/TLS packet records to a
        CSV file.
        """
        with open(output_csv, mode='w', newline='') as csv_file:
            csv_writer = csv.DictWriter(csv_file, fieldnames=FIELD_NAMES)
            csv_writer.writeheader()
            for record in self.packet_records:
                # Plain PacketRecords carry no url/servername info:
                if record.__class__ is PacketRecord:
                    continue
                csv_writer.writerow(record.todict())
        LOGGER.info(
            "%d packet url/servername record(s) saved to %s", self.packets_marked, output_csv)

    def _save_flow_info(self, file_name):
        """Write all flow records (archived flows first, then the still
        active ones) out to a CSV file.
        """
        with open(file_name, mode='w', newline='') as csv_file:
            if self.mode == 'u':
                # Unidirectional fields:
                fieldnames = ['src_ip', 'src_region', 'src_port', 'dst_ip', 'dst_region', 'dst_port',
                              'proto', 'pktTotalCount', 'octetTotalCount',
                              'min_ps', 'max_ps', 'avg_ps', 'std_dev_ps',
                              'flowStart', 'flowEnd', 'flowDuration',
                              'min_piat', 'max_piat', 'avg_piat', 'std_dev_piat',
                              '_octetTotalCount', '_min_ps', '_max_ps', '_avg_ps',
                              '_std_dev_ps', '_flowDuration', '_avg_piat', '_std_dev_piat']
            else:
                # Bidirectional fields:
                fieldnames = ['src_ip', 'src_port', 'dst_ip', 'dst_port',
                              'proto', 'pktTotalCount', 'octetTotalCount',
                              'min_ps', 'max_ps', 'avg_ps', 'std_dev_ps',
                              'flowStart', 'flowEnd', 'flowDuration',
                              'min_piat', 'max_piat', 'avg_piat', 'std_dev_piat',
                              'f_pktTotalCount', 'f_octetTotalCount',
                              'f_min_ps', 'f_max_ps', 'f_avg_ps', 'f_std_dev_ps',
                              'f_flowStart', 'f_flowEnd', 'f_flowDuration',
                              'f_min_piat', 'f_max_piat', 'f_avg_piat',
                              'f_std_dev_piat',
                              'b_pktTotalCount', 'b_octetTotalCount',
                              'b_min_ps', 'b_max_ps', 'b_avg_ps', 'b_std_dev_ps',
                              'b_flowStart', 'b_flowEnd', 'b_flowDuration',
                              'b_min_piat', 'b_max_piat', 'b_avg_piat',
                              'b_std_dev_piat'
                              ]
            writer = csv.DictWriter(
                csv_file, fieldnames=fieldnames, extrasaction='ignore')
            # Write header:
            writer.writeheader()
            # Write archive flows as rows (only values are needed):
            for flow_rec in self.flow_archive.values():
                try:
                    writer.writerow(self._calculate(flow_rec))
                # BUG FIX: the bare `except: pass` silently dropped broken
                # flows; keep the best-effort behaviour but log the error.
                except Exception:
                    LOGGER.exception("Failed to write archived flow record")
            # Write current flows as rows:
            for flow_rec in self.flow_cache.values():
                writer.writerow(self._calculate(flow_rec))

    def _calculate(self, flow_dict):
        """Derive per-flow statistics (packet sizes, duration and packet
        inter-arrival times), mutating and returning `flow_dict`.
        """
        flow_dict['min_ps'] = min(flow_dict['length'])
        flow_dict['max_ps'] = max(flow_dict['length'])
        flow_dict['avg_ps'] = (flow_dict['octetTotalCount']
                               / flow_dict['pktTotalCount'])
        flow_dict['std_dev_ps'] = np.std(flow_dict['length'])
        flow_dict['flowDuration'] = (
            flow_dict['flowEnd'] - flow_dict['flowStart'])
        # piat stats only exist when there are at least two packets:
        if flow_dict['iats']:
            flow_dict['min_piat'] = min(flow_dict['iats'])
            flow_dict['max_piat'] = max(flow_dict['iats'])
            flow_dict['avg_piat'] = (sum(flow_dict['iats'])
                                     / (flow_dict['pktTotalCount'] - 1))
            # Explicitly cast to float for numpy:
            flow_dict['std_dev_piat'] = np.std(
                [float(x) for x in flow_dict['iats']])
        flow_dict['flowStart'] = utc_stamp_to_utc(flow_dict['flowStart'])
        flow_dict['flowEnd'] = utc_stamp_to_utc(flow_dict['flowEnd'])
        return flow_dict

    def stats(self):
        """
        Log the stats for flows
        """
        LOGGER.info("Result statistics")
        LOGGER.info("-----------------")
        LOGGER.info("Flow Records: %s", len(self.flow_cache))
        LOGGER.info("Additional Archived Flow Records: %s",
                    len(self.flow_archive))
        LOGGER.info("Ignored Packets: %s", self.packets_ignored)
        LOGGER.info("Processed Packets: %s", self.packets_processed)
        LOGGER.info("Marked Packets: %s", self.packets_marked)

    #################
    # Load and Save #
    #################
    def load(self, catalog: str = None):
        """Load pcap file(s).

        `catalog` may be a file path or a directory path; for a directory,
        every file inside it is loaded in modification-time order.
        """
        if os.path.isdir(catalog):
            # Oldest-modified files first so packets stay chronological:
            names = sorted(
                os.listdir(catalog),
                key=lambda name: os.path.getmtime(os.path.join(catalog, name)))
            file_list = [os.path.join(catalog, name) for name in names]
        else:
            # BUG FIX: a single file path used to be re-joined with itself
            # (os.path.join(catalog, catalog)), which broke relative paths.
            file_list = [catalog]

        start = time.time()
        for filename in file_list:
            with PcapReader(filename) as pcap_reader:
                for packet in pcap_reader:
                    self._ingest_packet(packet)
        LOGGER.info("loaded %d file(s), totally spend %ss",
                    len(file_list), time.time() - start)

    def save(self, slices: t.List):
        """Dump a slice of raw packets to a timestamped pcapng file."""
        filename = os.path.join(self.packet_dir_fullname,
                                f'{datetime.now().strftime("%Y%m%d%H%M%S")}.pcapng')

        wrpcapng(filename, slices)


####################
# Filter Functions #
####################


def is_ipv4(packet: Packet) -> bool:
    """Return True when the packet carries no IPv6 layer."""
    # `layer in packet` is scapy's operator form of packet.haslayer(layer).
    return IPv6 not in packet