import sys
import logging
import typing as t

import numpy as np

from app.config import config
from app.utils import nethash

LOGGER = logging.getLogger(__name__)

class Flow(object):
    """
    Summary record for an individual network flow.

    Designed to be instantiated once by the Flows class and then
    switched between flow contexts by successive packet objects.
    Flow records are plain dictionaries held in the shared flow_cache;
    expired flows are re-keyed with a more specific hash and moved to
    flow_archive.
    """

    def __init__(self, flow_cache, flow_archive):
        """
        Initialise with references to the shared flow dictionaries
        and read the mode of operation from config.

        Parameters:
            flow_cache: dict of currently-active flows, keyed by flow hash
            flow_archive: dict of expired flows, keyed by a more specific hash
        """
        self.flow_cache = flow_cache
        self.flow_archive = flow_archive
        # Mode of operation: 'b' = bidirectional, 'u' = unidirectional
        self.mode = config.mode
        # Inactivity timeout (seconds) after which a flow is expired:
        self.flow_expiration = config.flow_expiration
        LOGGER.info(
            "Flows will expire after %s seconds of inactivity", self.flow_expiration)
        LOGGER.debug("Flow object instantiated in mode=%s", config.mode)

    def update(self, packet):
        """
        Add or update a flow in the flow_cache dictionary.

        If the packet matches an unexpired cached flow, the flow's
        statistics are updated in place. If the matching flow has
        expired, it is archived, removed from the cache, and a new flow
        is started from this packet. Otherwise a new flow is created.
        """
        if packet.flow_hash in self.flow_cache:
            # Found existing flow in dict:
            if self._is_current_flow(packet, self.flow_cache[packet.flow_hash]):
                # Flow is still active - update standard flow parameters:
                self._update_found(packet)
                if self.mode == 'b':
                    # Also update bidirectional flow parameters:
                    self._update_found_bidir(packet)
            else:
                # Expired flow so archive it:
                self._archive_flow(packet)
                # Delete from dict:
                self.flow_cache.pop(packet.flow_hash, None)
                # Now create as a new flow based on current packet:
                self._create_new(packet)
                if self.mode == 'b':
                    self._create_new_bidir(packet)
        else:
            # Flow doesn't exist yet, create it:
            self._create_new(packet)
            if self.mode == 'b':
                self._create_new_bidir(packet)

    def _update_found(self, packet):
        """
        Update an existing flow in the flow_cache dictionary with
        standard (non-bidirectional) parameters.

        Only called for packets belonging to an unexpired cached flow,
        so the flow already holds at least one prior timestamp and the
        inter-arrival-time can always be computed.
        """
        flow_hash = packet.flow_hash
        flow_dict = self.flow_cache[flow_hash]
        # Store size of this packet:
        flow_dict['length'].append(packet.length)
        # Update the count of packets and octets:
        flow_dict['pktTotalCount'] += 1
        flow_dict['octetTotalCount'] += packet.length
        # Update the min/max/avg/std_dev of the packet sizes:
        flow_dict['min_ps'] = min(flow_dict['length'])
        flow_dict['max_ps'] = max(flow_dict['length'])
        flow_dict['avg_ps'] = flow_dict['octetTotalCount'] / flow_dict['pktTotalCount']
        flow_dict['std_dev_ps'] = np.std(flow_dict['length'])
        # Store the timestamp of the newly captured packet:
        flow_dict['times'].append(packet.timestamp)
        # With at least 2 packets in the flow we can record the
        # packet-inter-arrival-time (current minus previous timestamp):
        flow_dict['iats'].append(flow_dict['times'][-1]
                                 - flow_dict['times'][-2])
        # Update the flow end (the start does not change):
        flow_dict['flowEnd'] = packet.timestamp
        # NOTE(review): 'flowDuration' is initialised to 0 in _create_new but
        # never updated here - confirm whether it is derived downstream from
        # flowEnd - flowStart.
        # Update the min/max/avg/std_dev of packet-inter-arrival-times;
        # there are pktTotalCount - 1 inter-arrival times:
        flow_dict['min_piat'] = min(flow_dict['iats'])
        flow_dict['max_piat'] = max(flow_dict['iats'])
        flow_dict['avg_piat'] = sum(flow_dict['iats']) / (flow_dict['pktTotalCount'] - 1)
        flow_dict['std_dev_piat'] = np.std([float(x) for x in flow_dict['iats']])
        # On the 5th packet take a one-time snapshot of the statistics so
        # far, stored under keys prefixed with '_':
        if flow_dict['pktTotalCount'] == 5:
            flow_dict['_octetTotalCount'] = flow_dict['octetTotalCount']
            flow_dict['_min_ps'] = min(flow_dict['length'])
            flow_dict['_max_ps'] = max(flow_dict['length'])
            flow_dict['_avg_ps'] = flow_dict['octetTotalCount'] / \
                flow_dict['pktTotalCount']
            flow_dict['_std_dev_ps'] = np.std(flow_dict['length'])
            flow_dict['_flowDuration'] = (
                packet.timestamp - flow_dict['flowStart'])
            flow_dict['_avg_piat'] = sum(
                flow_dict['iats']) / (flow_dict['pktTotalCount'] - 1)
            flow_dict['_std_dev_piat'] = np.std([float(x) for x in flow_dict['iats']])

    def _update_found_bidir(self, packet):
        """
        Update an existing flow in the flow_cache dictionary with
        bidirectional parameters (separately to standard parameters).

        Keys are prefixed 'f_' (forward) or 'b_' (backward) depending on
        the packet's direction relative to the flow's first packet.
        """
        flow_hash = packet.flow_hash
        flow_dict = self.flow_cache[flow_hash]
        # Determine packet direction (f=forward, b=backward):
        direction = self.packet_dir(packet, flow_dict)
        # Update keys dependant on the direction (f or b):
        if direction == 'f':
            # Forward (f) direction.
            # Store size of this packet:
            flow_dict['f_length'].append(packet.length)
            # Update the count of packets and octets:
            flow_dict['f_pktTotalCount'] += 1
            flow_dict['f_octetTotalCount'] += packet.length
            # Update the min/max/avg/std_dev of the packet sizes:
            flow_dict['f_min_ps'] = min(flow_dict['f_length'])
            flow_dict['f_max_ps'] = max(flow_dict['f_length'])
            flow_dict['f_avg_ps'] = flow_dict['f_octetTotalCount'] / \
                flow_dict['f_pktTotalCount']
            flow_dict['f_std_dev_ps'] = np.std(flow_dict['f_length'])
            # Store the timestamp of the newly captured packet:
            flow_dict['f_times'].append(packet.timestamp)
            # Record inter-packet arrival time if we have at least 2 packets:
            if (flow_dict['f_pktTotalCount'] > 1):
                flow_dict['f_iats'].append(flow_dict['f_times'][-1]
                                           - flow_dict['f_times'][-2])
            # Update the flow end/duration (the start does not change):
            flow_dict['f_flowEnd'] = packet.timestamp
            flow_dict['f_flowDuration'] = (
                packet.timestamp - flow_dict['f_flowStart'])
            # Update the min/max/avg/std_dev of packet-inter-arrival-times:
            flow_dict['f_min_piat'] = min(flow_dict['f_iats'])
            flow_dict['f_max_piat'] = max(flow_dict['f_iats'])
            flow_dict['f_avg_piat'] = sum(
                flow_dict['f_iats']) / (flow_dict['f_pktTotalCount'] - 1)
            flow_dict['f_std_dev_piat'] = np.std([float(x) for x in flow_dict['f_iats']])
        else:
            # Backward (b) direction.
            # Note: this may be the first backward-direction packet we've seen.
            # Store size of this packet:
            flow_dict['b_length'].append(packet.length)
            # Update the count of packets and octets:
            flow_dict['b_pktTotalCount'] += 1
            flow_dict['b_octetTotalCount'] += packet.length
            # Update the min/max/avg/std_dev of the packet sizes:
            flow_dict['b_min_ps'] = min(flow_dict['b_length'])
            flow_dict['b_max_ps'] = max(flow_dict['b_length'])
            flow_dict['b_avg_ps'] = flow_dict['b_octetTotalCount'] / \
                flow_dict['b_pktTotalCount']
            flow_dict['b_std_dev_ps'] = np.std(flow_dict['b_length'])
            # Store the timestamp of the newly captured packet:
            flow_dict['b_times'].append(packet.timestamp)
            if (flow_dict['b_pktTotalCount'] < 2):
                # First backward packet, so record the backward flow start:
                flow_dict['b_flowStart'] = packet.timestamp
            else:
                # Not the first backward packet - record inter-arrival time:
                flow_dict['b_iats'].append(flow_dict['b_times'][-1]
                                           - flow_dict['b_times'][-2])
                flow_dict['b_flowDuration'] = (
                    packet.timestamp - flow_dict['b_flowStart'])
                # Update the min/max/avg/std_dev of packet-inter-arrival-times:
                flow_dict['b_min_piat'] = min(flow_dict['b_iats'])
                flow_dict['b_max_piat'] = max(flow_dict['b_iats'])
                flow_dict['b_avg_piat'] = sum(
                    flow_dict['b_iats']) / (flow_dict['b_pktTotalCount'] - 1)
                flow_dict['b_std_dev_piat'] = np.std([float(x) for x in flow_dict['b_iats']])
            # Update the flow end (the start does not change):
            flow_dict['b_flowEnd'] = packet.timestamp

    def _create_new(self, packet):
        """
        Create a new flow in the flow_cache dictionary with standard
        (non-bidirectional) parameters, seeded from this first packet.
        """
        flow_hash = packet.flow_hash
        # Initialise the new flow key:
        self.flow_cache[flow_hash] = {}
        flow_dict = self.flow_cache[flow_hash]
        # Store the flow parameters for packet header values:
        flow_dict['src_ip'] = packet.ip_src
        flow_dict['dst_ip'] = packet.ip_dst
        flow_dict['src_region'] = packet.region_src
        flow_dict['dst_region'] = packet.region_dst
        flow_dict['proto'] = packet.proto
        flow_dict['src_port'] = packet.tp_src
        flow_dict['dst_port'] = packet.tp_dst
        # Store the size of the first packet:
        flow_dict['length'] = []
        flow_dict['length'].append(packet.length)
        # Store the packet count and number of octets:
        flow_dict['pktTotalCount'] = 1
        flow_dict['octetTotalCount'] = packet.length
        # Set the min/max/avg/std_dev of packet sizes
        # (in case there will be no more packets belonging to the flow):
        flow_dict['min_ps'] = packet.length
        flow_dict['max_ps'] = packet.length
        flow_dict['avg_ps'] = packet.length
        flow_dict['std_dev_ps'] = np.std(flow_dict['length'])
        # Store the timestamps of the packets:
        flow_dict['times'] = []
        flow_dict['times'].append(packet.timestamp)
        flow_dict['iats'] = []
        # Store the flow start/end/duration:
        flow_dict['flowStart'] = packet.timestamp
        flow_dict['flowEnd'] = packet.timestamp
        flow_dict['flowDuration'] = 0
        # Set the min/max/avg/std_dev of packet-inter-arrival times
        # (in case there will be no more packets belonging to the flow):
        flow_dict['min_piat'] = 0
        flow_dict['max_piat'] = 0
        flow_dict['avg_piat'] = 0
        flow_dict['std_dev_piat'] = 0

    def _create_new_bidir(self, packet):
        """
        Add bidirectional parameters to a newly created flow in the
        flow_cache dictionary. Both direction's keys are initialised;
        the direction this first packet belongs to is then seeded with
        its values.
        """
        flow_hash = packet.flow_hash
        flow_dict = self.flow_cache[flow_hash]
        # Set up keys in preparation:
        flow_dict['f_length'] = []
        flow_dict['f_times'] = []
        flow_dict['f_iats'] = []
        flow_dict['b_length'] = []
        flow_dict['b_times'] = []
        flow_dict['b_iats'] = []
        flow_dict['b_pktTotalCount'] = 0
        flow_dict['b_octetTotalCount'] = 0
        flow_dict['b_min_ps'] = 0
        flow_dict['b_max_ps'] = 0
        flow_dict['b_avg_ps'] = 0
        flow_dict['b_std_dev_ps'] = 0
        flow_dict['b_flowStart'] = 0
        flow_dict['b_flowEnd'] = 0
        flow_dict['b_flowDuration'] = 0
        flow_dict['b_min_piat'] = 0
        flow_dict['b_max_piat'] = 0
        flow_dict['b_avg_piat'] = 0
        flow_dict['b_std_dev_piat'] = 0
        # Determine packet direction (f=forward, b=backward):
        direction = self.packet_dir(packet, flow_dict)
        # Update keys dependant on the direction (f or b):
        if direction == 'f':
            # Forward (f) direction.
            # Store the size of the first packet:
            flow_dict['f_length'].append(packet.length)
            # Store the packet count and number of octets:
            flow_dict['f_pktTotalCount'] = 1
            flow_dict['f_octetTotalCount'] = packet.length
            # Set the min/max/avg/std_dev of packet sizes
            # (in case there will be no more packets belonging to the flow):
            flow_dict['f_min_ps'] = packet.length
            flow_dict['f_max_ps'] = packet.length
            flow_dict['f_avg_ps'] = packet.length
            flow_dict['f_std_dev_ps'] = np.std(flow_dict['f_length'])
            # Store the timestamps of the packets:
            flow_dict['f_times'].append(packet.timestamp)
            # Store the flow start/end/duration:
            flow_dict['f_flowStart'] = packet.timestamp
            flow_dict['f_flowEnd'] = packet.timestamp
            flow_dict['f_flowDuration'] = 0
            # Set the min/max/avg/std_dev of packet-inter-arrival times
            # (in case there will be no more packets belonging to the flow):
            flow_dict['f_min_piat'] = 0
            flow_dict['f_max_piat'] = 0
            flow_dict['f_avg_piat'] = 0
            flow_dict['f_std_dev_piat'] = 0
        else:
            # Backward (b) direction.
            # Store the size of the first packet:
            flow_dict['b_length'].append(packet.length)
            # Store the packet count and number of octets:
            flow_dict['b_pktTotalCount'] = 1
            flow_dict['b_octetTotalCount'] = packet.length
            # Set the min/max/avg/std_dev of packet sizes
            # (in case there will be no more packets belonging to the flow):
            flow_dict['b_min_ps'] = packet.length
            flow_dict['b_max_ps'] = packet.length
            flow_dict['b_avg_ps'] = packet.length
            flow_dict['b_std_dev_ps'] = np.std(flow_dict['b_length'])
            # Store the timestamps of the packets:
            flow_dict['b_times'].append(packet.timestamp)
            # Store the flow start/end/duration:
            flow_dict['b_flowStart'] = packet.timestamp
            flow_dict['b_flowEnd'] = packet.timestamp
            flow_dict['b_flowDuration'] = 0
            # Set the min/max/avg/std_dev of packet-inter-arrival times
            # (in case there will be no more packets belonging to the flow):
            flow_dict['b_min_piat'] = 0
            flow_dict['b_max_piat'] = 0
            flow_dict['b_avg_piat'] = 0
            flow_dict['b_std_dev_piat'] = 0

    def _is_current_flow(self, packet, flow_dict):
        """
        Check if a cached flow is current or has expired.
        Only called when the flow hash is already known.

        Returns:
            True: flow has not expired
            False: flow has expired, i.e. time since the previous packet
            in the flow exceeds the flow expiration threshold
        """
        if flow_dict['iats']:
            # Flow has 2+ packets; compare against the last packet time:
            if (packet.timestamp - flow_dict['times'][-1]) > self.flow_expiration:
                # Flow has expired:
                return False
            else:
                # Flow has not expired:
                return True
        elif flow_dict['pktTotalCount'] == 1:
            # Only 1 packet so no PIAT; compare against the flow start:
            if (packet.timestamp - flow_dict['flowStart']) > self.flow_expiration:
                # Flow has expired:
                return False
            else:
                # Flow has not expired:
                return True
        else:
            # Shouldn't happen (no packets in a cached flow); treat as current:
            LOGGER.warning("Strange condition...")
            return True

    def _archive_flow(self, packet):
        """
        Copy a flow record to the archive dictionary, indexed by a
        longer, more unique key that includes the flow start time.

        The caller is responsible for removing the flow from flow_cache.
        """
        flow_hash = packet.flow_hash
        flow_dict = self.flow_cache[flow_hash]
        start_timestamp = flow_dict['flowStart']
        ip_src = flow_dict['src_ip']
        ip_dst = flow_dict['dst_ip']
        proto = flow_dict['proto']
        tp_src = flow_dict['src_port']
        tp_dst = flow_dict['dst_port']
        # Create new more-specific hash key for archiving.
        # TCP (6) and UDP (17) flows include transport ports in the key:
        if self.mode == 'b':
            if proto == 6 or proto == 17:
                # Generate a directional 6-tuple flow_hash:
                new_hash = nethash.hash_b6((ip_src,
                                            ip_dst, proto, tp_src,
                                            tp_dst, start_timestamp))
            else:
                # Generate a directional 4-tuple flow_hash:
                new_hash = nethash.hash_b4((ip_src,
                                            ip_dst, proto,
                                            start_timestamp))
        elif self.mode == 'u':
            if proto == 6 or proto == 17:
                # Generate a directional 6-tuple flow_hash:
                new_hash = nethash.hash_u6((ip_src,
                                            ip_dst, proto, tp_src,
                                            tp_dst, start_timestamp))
            else:
                # Generate a directional 4-tuple flow_hash:
                new_hash = nethash.hash_u4((ip_src,
                                            ip_dst, proto,
                                            start_timestamp))
        else:
            # Fix: previously an unknown mode fell through with new_hash
            # unassigned, raising NameError below. Log and skip archiving:
            LOGGER.error("Unknown mode=%s, cannot archive flow", self.mode)
            return
        # Check key isn't already used in archive:
        if new_hash in self.flow_archive:
            LOGGER.warning("archive duplicate flow key=%s", new_hash)
            return
        # Copy to flow archive:
        self.flow_archive[new_hash] = flow_dict

    def packet_dir(self, packet, flow_dict):
        """
        Determine packet direction relative to the flow's first packet.

        Returns:
            'f': forward (packet source matches the flow source)
            'b': backward (packet source matches the flow destination)
        Exits the program if the packet matches neither direction.
        """
        if packet.ip_src == flow_dict['src_ip']:
            return 'f'
        elif packet.ip_src == flow_dict['dst_ip']:
            return 'b'
        else:
            LOGGER.critical("Uh oh, something went wrong. Exiting")
            sys.exit()
