from typing import Tuple
import json, os, time, glob, logging, argparse
import dpkt
import numpy as np
from tqdm import tqdm

class Analyzer:
    """Extract per-flow features from pcap files and save them as JSON.

    For every bidirectional TCP flow, the report records timestamps,
    packet sizes, packet directions, and packet interarrival times.
    """

    def __init__(self, config: dict = None):
        """
        Args:
            config: Optional settings. Recognized keys:
                - "output_dir": directory for the JSON reports
                  (default: data/output_of_analyzer/<timestamp>).
                - "log_file": log file path (default: analyzer.log).
        """
        # None default instead of a mutable {} default (shared across calls).
        config = config or {}
        t = time.strftime("%Y%m%d%H%M%S", time.localtime())
        self.output_dir = config.get("output_dir", "data/output_of_analyzer/{}".format(t))
        self.log_file = config.get("log_file", "analyzer.log")

        logging.basicConfig(
                    filename=self.log_file,
                    format="%(asctime)s - %(levelname)s - %(message)s ",
                    level=logging.INFO
                )

    def _contain(self, flows: dict, flow_id: Tuple) -> int:
        """Check whether a flow is already tracked in either direction.

        Returns:
            0: flow does not exist
            1: flow exists, packet travels src to dst
            2: flow exists, packet travels dst to src
        """
        # Annotation fixed: this returns an int code (0/1/2), not a bool.
        if flow_id in flows:
            return 1
        rev_flow_id = self._generate_bidirection_flow_id(
            flow_id[1], flow_id[0], flow_id[3], flow_id[2], flow_id[4])
        if rev_flow_id in flows:
            return 2
        return 0

    def _generate_bidirection_flow_id(self, src_ip, dst_ip, src_port, dst_port, protocol) -> Tuple:
        """Generate a bidirectional flow ID.

        The trailing 0 marks the tuple as a bidirectional flow.
        """
        return (src_ip, dst_ip, src_port, dst_port, protocol, 0)

    def _generate_filename(self, original_filename: str, ext: str = "json") -> str:
        """Derive an output filename from a pcap path by swapping the extension.

        Example:
            original_filename: "data/DoH_traffic_dataset_subset/00cha.com/0-00cha.com-20220221112513-0.pcap"
            derived filename: "0-00cha.com-20220221112513-0.json"

        Args:
            original_filename: Filename or path.
            ext: Extension of the derived filename (without the dot).

        Returns:
            Derived filename.
        """
        return original_filename.rsplit("/", 1)[-1].rsplit(".", 1)[0] + "." + ext

    def _save(self, flows: dict, output_filepath: str):
        """Serialize flows to JSON (tuple keys stringified, since JSON
        object keys must be strings)."""
        with open(output_filepath, "w") as f_out:
            f_out.write(json.dumps({str(key): flows[key] for key in flows}, indent=4))

    def inspect(self, filepath: str, output_dir: str):
        """Inspect one pcap file and write a JSON flow report into output_dir.

        Args:
            filepath: Path of the pcap file to parse.
            output_dir: Directory that receives the derived .json report.
        """
        flows = {}

        with open(filepath, "rb") as f_in:
            pcap = dpkt.pcap.Reader(f_in)

            for ts, buf in pcap:
                eth = dpkt.ethernet.Ethernet(buf)
                ip = eth.data
                # Skip frames that are not TCP over IPv4/IPv6 (e.g. ARP, UDP)
                # instead of letting an AttributeError abort the whole file.
                if not isinstance(ip, (dpkt.ip.IP, dpkt.ip6.IP6)):
                    continue
                if not isinstance(ip.data, dpkt.tcp.TCP):
                    continue
                # Addresses are compared as ints derived from the raw bytes.
                src_ip, dst_ip = int(ip.src.hex(), 16), int(ip.dst.hex(), 16)
                tcp = ip.data
                src_port, dst_port = tcp.sport, tcp.dport
                flow_id = self._generate_bidirection_flow_id(src_ip, dst_ip, src_port, dst_port, "tcp")
                contain = self._contain(flows, flow_id)

                if not contain:
                    flows[flow_id] = {
                        "ts_list": [],
                        "ps_list": [],
                        "direction_list": [],
                        "piat_list": [],
                    }
                    contain = 1

                direction = contain - 1 # 0: src2dst, 1: dst2src
                if direction == 1:
                    # Reverse packets are recorded under the forward flow key.
                    flow_id = self._generate_bidirection_flow_id(flow_id[1], flow_id[0], flow_id[3], flow_id[2], flow_id[4])
                flows[flow_id]["ts_list"].append(ts) # timestamp
                flows[flow_id]["ps_list"].append(len(buf)) # packet size (whole frame)
                flows[flow_id]["direction_list"].append(direction)

        # piat: packet interarrival time (milliseconds); the first packet of
        # each flow gets 0. np.diff(prepend=ts[0]) yields exactly that.
        for key in flows:
            ts_arr = np.asarray(flows[key]["ts_list"], dtype=np.float64)
            piat = np.diff(ts_arr, prepend=ts_arr[0]) * 1000  # s to ms
            flows[key]["piat_list"] = piat.tolist()

        output_filepath = os.path.join(output_dir, self._generate_filename(filepath))
        self._save(flows, output_filepath)

    def inspect_batch(self, dataset_path: str) -> str:
        """Inspect every pcap under dataset_path/<domain>/ and write reports.

        Args:
            dataset_path: Dataset root; each subdirectory is one domain
                holding *.pcap files.

        Returns:
            The last per-domain report directory (self.output_dir when the
            dataset contains no domain directories).
        """
        logging.info("Analyzer start working. Dataset: {}".format(dataset_path))

        os.makedirs(self.output_dir, exist_ok=True)

        domains = os.listdir(dataset_path)
        # Guard: with an empty dataset the loop never binds json_domain_dir,
        # which previously made the final return raise NameError.
        json_domain_dir = self.output_dir

        for domain in tqdm(domains, ncols=50):
            json_domain_dir = os.path.join(self.output_dir, domain)
            pcap_domain_dir = os.path.join(dataset_path, domain)
            pcap_files = glob.glob(os.path.join(pcap_domain_dir, "**.pcap"))

            os.makedirs(json_domain_dir, exist_ok=True)

            for pcap_file in pcap_files:
                try:
                    self.inspect(pcap_file, json_domain_dir)
                except Exception as e:
                    # Best effort: log the failed file (with reason) and move on.
                    print(str(e))
                    logging.error("Failed: {} ({})".format(pcap_file, e))

        logging.info("Done. Reports were saved at {}".format(self.output_dir))

        return json_domain_dir

def main():
    """CLI entry point: convert a pcap dataset into JSON feature reports."""
    # Typo fixed in the help text ("Transfrom" -> "Transform").
    parser = argparse.ArgumentParser(description="Transform pcap files to json files that contain\
        timestamps, packet size, direction and packet interarrival time sequences.")

    parser.add_argument(
        "--dataset_path", "-d",
        type=str,
        default="/home/chuen/workspace/encrypted-dns-traffic-analysis/DoH_traffic_dataset"
    )
    args = parser.parse_args()
    analyzer = Analyzer()
    result_dir = analyzer.inspect_batch(args.dataset_path)
    print(result_dir)

# Script entry guard; the trailing no-op `pass` was removed.
if __name__ == "__main__":
    main()