import argparse
import csv
import urllib.request
import json
import ipaddress
import datetime
import subprocess
from enum import Enum
from ipaddress import IPv4Address

from TrafficGenerator.traceroute_utils import number_of_hops_to_hostname

# Operating modes for the script (NOTE(review): not referenced elsewhere in
# this chunk -- kept for backward compatibility).  Functional Enum API;
# member names and values are identical to the class-syntax original.
Action = Enum('Action', [('filter', 1), ('rank', 2)])

# Firewall consts
# Column indices (0-based) into a space-delimited firewall log row.
INCOMING_INTERFACE = 'Lan3'  # interface name identifying inbound traffic
DATE_COLUMN = 1   # e.g. '01Jan2020' -- parsed with %d%b%Y in FirewallPacket
HOUR_COLUMN = 2   # e.g. '12:00:00' -- parsed with %H:%M:%S in FirewallPacket
INTERFACE_COLUMN = 3   # compared against INCOMING_INTERFACE when filtering
DSTPORT_COLUMN = 7
SRCPORT_COLUMN = 8
SOURCE_COLUMN = 9 
DEST_COLUMN = 10

# TTL is only present in longer (extended) rows; FirewallPacket falls back
# to 0 when the row is too short.  NOTE(review): the value 100 is assumed to
# match the extended log format -- confirm against a real log sample.
TTL_COLUMN = 100

# TCPDUMP consts
# External JAR that converts a raw capture into whitespace-separated text.
DUMP_PARSER = r'IPAlgorithm\tcpdump.jar'
# Field order requested from the JAR; must match TCPDUMPPacket's constructor.
DUMP_PARSER_ARGS = 'ttl sip sp dip dp time'

class FirewallPacket(object):
    """One packet parsed from a space-delimited firewall log row.

    Exposes the same attribute set as TCPDUMPPacket (time, src, dst,
    src_port, dst_port, ttl) so downstream code can treat both uniformly.
    The raw row is kept in ``self.row`` for re-serialization.
    """

    def __init__(self, row):
        self.row = row
        # Date and hour are split across two columns, e.g. '01Jan2020' '12:00:00'.
        self.time = datetime.datetime.strptime(row[DATE_COLUMN] + ' ' + row[HOUR_COLUMN], "%d%b%Y %H:%M:%S")
        self.src = row[SOURCE_COLUMN]
        self.dst = row[DEST_COLUMN]
        self.src_port = row[SRCPORT_COLUMN]
        self.dst_port = row[DSTPORT_COLUMN]
        # Reading row[TTL_COLUMN] requires len(row) >= TTL_COLUMN + 1.
        # The previous bound (len(row) >= TTL_COLUMN - 1) let rows of length
        # TTL_COLUMN-1 or TTL_COLUMN through and raised IndexError.
        if len(row) > TTL_COLUMN:
            self.ttl = row[TTL_COLUMN]
        else:
            # Short (non-extended) row: no TTL field available.
            self.ttl = 0

class TCPDUMPPacket(object):
    """One packet as emitted by the external tcpdump-parsing JAR.

    Each constructor argument arrives as a whitespace-separated text token
    (field order is DUMP_PARSER_ARGS); numeric fields are converted here.
    Addresses arrive as integers and are wrapped in IPv4Address objects.
    """

    def __init__(self, ttl, sip, sp, dip, dp, time):
        # Convert the integer-encoded addresses to IPv4Address objects.
        self.src, self.dst = (IPv4Address(int(token)) for token in (sip, dip))
        # Ports are kept as the raw string tokens.
        self.src_port, self.dst_port = sp, dp
        # TTL is stored as a non-negative integer.
        raw_ttl = int(ttl)
        self.ttl = raw_ttl if raw_ttl >= 0 else -raw_ttl
        self.time = float(time)


# Sources emitting at most this many packets are dropped before the (slow,
# networked) ASN lookups in rank_origins (kept only when count > threshold).
SOURCES_THRESHOLD = 20
    
# Output path used when none is supplied on the command line.
DEFAULT_OUTPUT_FILE = 'output_log.txt'


def filter_incoming(file_type, input_log_file, output_log_file, **kwargs):
    """Write a log containing only the packets seen on the incoming interface.

    Parameters:
        file_type: 'firewall' or 'tcpdump' (see get_incoming_packets).
        input_log_file: path of the raw log to read.
        output_log_file: path of the filtered log to write.
        **kwargs: ignored; absorbs extra entries from the argparse namespace.
    """
    input_packets, header = get_incoming_packets(file_type, input_log_file)

    # newline='' stops the csv module from doubling line endings on Windows
    # (same as the writer in rank_origins).
    with open(output_log_file, 'w', newline='') as output_file:
        writer = csv.writer(output_file, delimiter=' ')
        writer.writerow(header)
        writer.writerows(packet.row for packet in input_packets)


def rank_origins(file_type, input_log_file, output_log_file, cut, tracert = False, **kwargs):
    """Rank the origin ASes of incoming traffic by packet count.

    Parameters:
        file_type: 'firewall' or 'tcpdump' (see get_incoming_packets).
        input_log_file: path of the raw log to read.
        output_log_file: path to write the ranking to, or None to skip writing.
        cut: keep only the top ``cut`` ASes (accepts an int or numeric string,
            since argparse delivers command-line values as strings).
        tracert: when True, traceroute one IP per AS and record the hop count
            in place of the per-AS IP list.
        **kwargs: ignored; absorbs extra entries from the argparse namespace.

    Returns:
        List of (asn, [as_name, packet_count, ip_list_or_hop_count]) tuples,
        sorted by packet count descending and truncated to ``cut`` entries.
    """
    input_packets, header = get_incoming_packets(file_type, input_log_file)

    source_counts = _count_sources(input_packets)

    # Drop low-volume sources before the slow, networked ASN lookups.
    busy_sources = [item
                    for item in sorted(source_counts.items(),
                                       key=lambda item: item[1], reverse=True)
                    if item[1] > SOURCES_THRESHOLD]

    asn_dict = _group_by_asn(busy_sources)

    asn_sources_list = sorted(asn_dict.items(),
                              key=lambda item: item[1][1], reverse=True)

    # int(cut): argparse supplies --cut as a string unless type=int is
    # configured; slicing with a string would raise TypeError.
    asn_sources_list = asn_sources_list[:int(cut)]

    if tracert:
        asn_sources_list = _add_hop_counts(asn_sources_list)

    if output_log_file is not None:
        _write_ranking(output_log_file, asn_sources_list, tracert)

    return asn_sources_list


def _count_sources(packets):
    """Return a dict mapping source address -> number of packets from it."""
    counts = {}
    for packet in packets:
        counts[packet.src] = counts.get(packet.src, 0) + 1
    return counts


def _group_by_asn(sources):
    """Aggregate (source, count) pairs by origin AS.

    Returns a dict mapping ASN -> [as_name, total_packets, [ip, ...]].
    Sources that are not IPv4 addresses, or whose origin is unknown to
    RIPEstat, are silently skipped.
    """
    asn_dict = {}
    for address, count in sources:
        try:
            asn, name = query_address_asn(address)
        except ipaddress.AddressValueError:
            # Not an IPv4 address (hostname or IPv6) -- cannot look it up.
            continue
        except IndexError:
            # RIPEstat returned no routes: origin unknown.
            continue
        if asn in asn_dict:
            asn_dict[asn][1] += count
            asn_dict[asn][2].append(address)
        else:
            asn_dict[asn] = [name, count, [address]]
    return asn_dict


def _add_hop_counts(asn_sources_list):
    """Replace each AS record's IP list with a traceroute hop count.

    The first IP recorded for the AS is used as a representative target.
    """
    result = []
    for asn, (name, packet_count, ip_list) in asn_sources_list:
        number_of_hops = number_of_hops_to_hostname(ip_list[0])
        result.append((asn, [name, packet_count, number_of_hops]))
    return result


def _write_ranking(output_log_file, asn_sources_list, tracert):
    """Write the ranking as space-delimited rows: asn, name, count[, hops]."""
    with open(output_log_file, 'w', newline='') as output:
        writer = csv.writer(output, delimiter=' ')
        # With tracert the third value is the hop count; without it, stop at
        # the packet count rather than dumping the raw IP list.
        data_index = 3 if tracert else 2
        for asn, as_data in asn_sources_list:
            writer.writerow([asn] + as_data[:data_index])

def query_address_asn(ipaddr):
    """Look up the origin AS of an IPv4 address via the RIPEstat API.

    Parameters:
        ipaddr: dotted-quad IPv4 address string.

    Returns:
        (origin_asn, as_name) for the first route covering the address.

    Raises:
        ipaddress.AddressValueError: if ipaddr is not an IPv4 address.
        IndexError: if RIPEstat knows no route for the address.
    """
    # Raises AddressValueError early if not an IPv4 address.
    ipaddress.IPv4Address(ipaddr)
    request = 'https://stat.ripe.net/data/prefix-routing-consistency/data.json?resource=%s' % (ipaddr, )
    # BUGFIX: HTTPResponse has no readall() method, so the original call
    # raised AttributeError on every lookup; read() is the correct API.
    # The context manager also guarantees the connection is closed.
    with urllib.request.urlopen(request) as response:
        resource = json.loads(response.read().decode('utf-8'))
    route = resource['data']['routes'][0]
    return route['origin'], route['asn_name']
  
    
def get_incoming_packets(file_type, input_log_file):
    """Parse a log file and return the packets relevant to incoming traffic.

    Parameters:
        file_type: 'firewall' (space-delimited CSV export, filtered to rows on
            INCOMING_INTERFACE) or 'tcpdump' (raw capture handed to the
            external JAR parser; all packets are returned).
        input_log_file: path of the log/capture file.

    Returns:
        (packets, header): a list of FirewallPacket / TCPDUMPPacket objects
        and the log's header row ('' for tcpdump, which has no header).

    Raises:
        ValueError: for an unsupported file_type.
    """
    input_packets = []
    header = ''
    if file_type == 'firewall':
        # Named log_file rather than `input` to avoid shadowing the builtin.
        with open(input_log_file, 'r') as log_file:
            reader = csv.reader(log_file, delimiter=' ', quotechar='"')
            for row in reader:
                if reader.line_num == 1:
                    header = row
                elif row[INTERFACE_COLUMN] == INCOMING_INTERFACE:
                    input_packets.append(FirewallPacket(row))
    elif file_type == 'tcpdump':
        # Argument list (shell=False) avoids shell quoting/injection problems
        # with the input path; DUMP_PARSER_ARGS expands to separate tokens.
        # (The original opened input_log_file itself too, but only the JAR
        # actually reads it.)
        command = ['java', '-jar', DUMP_PARSER, input_log_file] + DUMP_PARSER_ARGS.split()
        # The context manager waits for the child and closes its pipe,
        # fixing the process/pipe leak in the original.
        with subprocess.Popen(command, stdout=subprocess.PIPE) as parser_proc:
            for line in parser_proc.stdout:
                input_packets.append(TCPDUMPPacket(*line.decode('utf-8').split()))
    else:
        # ValueError is still caught by callers handling Exception.
        raise ValueError('Unsupported file type: %r' % (file_type,))

    return input_packets, header


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("file_type", choices=['tcpdump', 'firewall'])
    parser.add_argument("input_log_file",)

    # required=True: without it, invoking the script with no subcommand
    # crashes with AttributeError on args.func instead of a usage message.
    # (The extra 'command' entry in vars(args) is absorbed by **kwargs.)
    subparsers = parser.add_subparsers(dest='command', required=True)
    # Filtered log file subparser
    incoming_parser = subparsers.add_parser('filter_incoming', help='Only produce a log file of incoming traffic')
    incoming_parser.set_defaults(func=filter_incoming)
    incoming_parser.add_argument('output_log_file', nargs='?', default=DEFAULT_OUTPUT_FILE, help='Path of output file. Defaults to %(default)s')

    # AS ranking file
    ranking_parser = subparsers.add_parser('rank_ases', help='Produce a ranking of the origin ASes')
    ranking_parser.set_defaults(func=rank_origins)
    ranking_parser.add_argument('--tracert', action='store_true', help='Traceroutes the top ranked ASes to tell the TTL distance')
    # type=int: without it a command-line --cut value arrives as a string and
    # rank_origins' list slicing raises TypeError.
    ranking_parser.add_argument('--cut', type=int, default=30, help='Show only this amount of ranked ASes')
    ranking_parser.add_argument('output_log_file', nargs='?', default=DEFAULT_OUTPUT_FILE, help='Path of output file. Defaults to %(default)s')

    args = parser.parse_args()
    # Dispatch to the selected subcommand; 'func' itself ends up in the
    # kwargs and is absorbed by each handler's **kwargs.
    args.func(**vars(args))
