import os, glob, time, json, random, logging, argparse
from typing import Tuple
import numpy as np
from tqdm import tqdm

# Traffic Obfuscation
class Mixer:
    """Traffic Obfuscation (TO): mix each target flow with a randomly
    selected noise flow so the combined trace is harder to classify.
    """

    def __init__(self, config=None):
        """
        Args:
            config: Optional dict overriding the defaults below
                (output_dir, analyzer_outputfiles_path,
                inspector_outputfiles_path, noise_domain_num,
                noise_domain_filepath, seed, log_file).
        """
        config = config or {}  # avoid the mutable-default-argument pitfall
        t = time.strftime("%Y%m%d%H%M%S", time.localtime())
        self.output_dir = config.get("output_dir", "data/output_of_mixer/{}".format(t))
        self.analyzer_outputfiles_path = config.get("analyzer_outputfiles_path", "data/output_of_analyzer/20220402195214")
        self.inspector_outputfiles_path = config.get("inspector_outputfiles_path", "data/output_of_inspector/20220402203237")
        self.noise_domain_num = config.get("noise_domain_num", 100)
        self.website_ranking_filepath = \
            config.get(
                "noise_domain_filepath",
                "data/domains/chinaz-ranking-reachable-20220206.txt"
            )
        self.seed = config.get("seed", 7)
        self.log_file = config.get("log_file", "mixer.log")

        self.noise_domains = self._load_domains(self.website_ranking_filepath, self.noise_domain_num)

        random.seed(self.seed)
        logging.basicConfig(
            filename=self.log_file,
            format="%(asctime)s - %(levelname)s - %(message)s ",
            level=logging.INFO
        )

    def _load_domains(self, website_ranking:str, num:int=100) -> list:
        """Load at most `num` domains from the ranking file, keeping only
        those that also have analyzer output available.

        Args:
            website_ranking: Path to the ranking file (one domain per line).
            num: Maximum number of domains to return.

        Returns:
            Domain names, in ranking order.
        """
        # A set gives O(1) membership tests instead of scanning a list.
        available_domains = set(os.listdir(self.analyzer_outputfiles_path))
        domains = []

        with open(website_ranking, "r") as f_in:
            for line in f_in:
                domain = line.strip()
                if domain in available_domains:
                    domains.append(domain)
                if len(domains) == num:
                    break

        return domains

    def _test__load_domains(self):
        # Smoke test. (Fixed: _load_domains requires the ranking filepath.)
        print(self._load_domains(self.website_ranking_filepath, self.noise_domain_num))

    def mix(self, filepath1:str, filepath2:str, note:str="", delay="default") -> dict:
        """Mix 2 flows together.

        filepath1 and filepath2 correspond to inspector's output files.
        The offset of the No.i packet is ts_list[i] - ts_list[0].

        The new flow consists of:
        - offset_list (s)
        - ps_list (byte)
        - direction_list
        - piat_list (ms)
        Please notice that the new flow has offset_list instead of ts_list.

        Args:
            filepath1: Flow1's path (output of inspector).
            filepath2: Flow2's path (output of inspector).
            note: Optional annotation stored under the "note" key.
            delay: "default" or float. If "default", a random delay in
                [0, 0.1) s is used.
                delay = flow2's first offset - flow1's first offset. (s)

        Returns:
            new_flow_with_id: {flow1's id: new_flow}.
        """

        if delay == "default":
            delay = random.random() / 10  # uniform in [0, 0.1)

        with open(filepath1, "r") as f1:
            flow_with_id1 = json.load(f1)
        with open(filepath2, "r") as f2:
            flow_with_id2 = json.load(f2)

        flow_id1, flow_id2 = list(flow_with_id1.keys())[0], list(flow_with_id2.keys())[0]
        flow1, flow2 = flow_with_id1[flow_id1], flow_with_id2[flow_id2]

        ts_list1, ts_list2 = flow1["ts_list"], flow2["ts_list"]
        # BUGFIX: mark2 previously used len(ts_list1); because zip truncates
        # to the shorter sequence, flow2's extra packets were silently
        # dropped whenever flow2 was longer than flow1.
        mark1 = ["1" for _ in range(len(ts_list1))]
        mark2 = ["2" for _ in range(len(ts_list2))]
        om1, om2 = [], [] # (offset, mark) pairs

        for ts, mark in zip(ts_list1, mark1):
            om1.append((ts-ts_list1[0], mark))

        for ts, mark in zip(ts_list2, mark2):
            om2.append((ts-ts_list2[0]+delay, mark))

        # Stable sort keeps flow1's packet first when offsets tie.
        om = sorted(om1 + om2, key=lambda x: x[0])
        offset_list, mark_list = [], []

        for offset, mark in om:
            mark_list.append(mark)
            offset_list.append(offset)

        # Re-interleave packet sizes and directions following the mark order.
        ps_list, direction_list = [], []
        ps_list1, ps_list2 = flow1["ps_list"], flow2["ps_list"]
        direction_list1, direction_list2 = flow1["direction_list"], flow2["direction_list"]
        i1, i2 = 0, 0

        for mark in mark_list:
            if mark == "1":
                ps_list.append(ps_list1[i1])
                direction_list.append(direction_list1[i1])
                i1 += 1
            else:
                ps_list.append(ps_list2[i2])
                direction_list.append(direction_list2[i2])
                i2 += 1

        # Packet inter-arrival times: first entry is 0, then successive diffs.
        t0 = np.array([0] + offset_list, dtype=np.float64)
        t1 = np.array(offset_list + [0], dtype=np.float64)
        t = t1 - t0
        t = t[1:-1]
        t = np.hstack((0, t))
        t = t * 1000 # s to ms
        piat_list = t.tolist()

        new_flow = {
            "offset_list": offset_list,
            "ps_list": ps_list,
            "direction_list": direction_list,
            "piat_list": piat_list,
        }

        if note != "":
            new_flow["note"] = note

        new_flow_with_id = {
            flow_id1: new_flow
        }
        return new_flow_with_id

    def _select(self) -> str:
        """Select a noise domain randomly, return one of its flow filepaths.

        NOTE(review): assumes each noise-domain directory contains files
        indexed 0..99 — raises IndexError otherwise; confirm against the
        inspector's output layout.
        """
        # Use the actual list length: fewer than noise_domain_num domains
        # may have been loaded by _load_domains.
        domain_idx = random.randint(0, len(self.noise_domains)-1)
        file_idx = random.randint(0, 99)

        domain = self.noise_domains[domain_idx]
        domain_dir = os.path.join(self.inspector_outputfiles_path, domain)
        filepath = glob.glob(os.path.join(domain_dir, "{}-*.json".format(file_idx)))[0]
        return filepath

    def _get_domain_by_filepath(self, filepath:str):
        """Extract the domain (second-to-last path component) from a filepath.

        Example:
            original_filename: "data/DoH_traffic_dataset_subset/00cha.com/0-00cha.com-20220221112513-0.pcap"
            domain: "00cha.com"

        Args:
            filepath: File path.

        Returns:
            Domain
        """
        return filepath.rsplit("/", 2)[1]

    def create_TO_dataset(self, progress_bar=True):
        """Create the Traffic Obfuscation dataset.

        Mixes every inspector output file with a randomly selected noise
        flow and writes the result under output_dir/<domain>/.
        """
        import analyzer
        generate_filename = analyzer.Analyzer()._generate_filename

        directory = self.inspector_outputfiles_path
        ext = "json"
        domains = os.listdir(directory)

        for domain in tqdm(domains, ncols=50, disable=not progress_bar):
            domain_dir = os.path.join(directory, domain)
            files = glob.glob(os.path.join(domain_dir, "*.{}".format(ext)))

            for filepath in files:
                noise_domain = self._select()
                # The noise file's path is stored as the note for traceability.
                mixed_flow = self.mix(filepath, noise_domain, note=noise_domain)

                output_filename = generate_filename(filepath)
                output_dir = os.path.join(self.output_dir, domain)
                output_filepath = os.path.join(output_dir, output_filename)

                os.makedirs(output_dir, exist_ok=True)

                with open(output_filepath, "w") as f_out:
                    f_out.write(json.dumps(mixed_flow, indent=4))

# Adaptive Packet Insertion
class Injector:
    """Adaptive Packet Insertion (API).

    k: Coefficient that controls the probability of injection.
    window_size: Window size (ms).
    mode: "all", "src2dst" or "dst2src"

    Injection strategy:
    A counter records the idle time since the last packet; when the idle
    time reaches n * window_size (ms), the injector injects a padding packet
    with a probability of n * k * window_size at current_timestamp + x ms,
    where 0 <= x <= window_size.
    """
    def __init__(self, config=None):
        """
        Args:
            config: Optional dict overriding the defaults below.
        """
        config = config or {}  # avoid the mutable-default-argument pitfall
        t = time.strftime("%Y%m%d%H%M%S", time.localtime())

        self.inspector_outputfiles_path = \
            config.get("inspector_outputfiles_path", "data/output_of_inspector/20220402203237")
        self.seed = config.get("seed", 7)
        self.log_file = config.get("log_file", "injector.log")
        self.pslb = config.get("pslb", 66)  # packet size lower bound (bytes)
        self.psub = config.get("psub", 1514) # packet size upper bound (bytes)
        self.window_size = config.get("window_size", 100) # ms
        # Fall back to "all" when the caller passes mode=None (e.g. the CLI
        # when -m is omitted); previously this made inject() raise KeyError.
        self.mode = config.get("mode", "all") or "all"
        self.k = config.get("k", 0.001)
        self.max_tail_pn = config.get("max_tail_pn", 10) # max tail padding packets
        self.add_tail = config.get("add_tail", True)
        # Directory name encodes window_size-k-pslb-psub-mode-timestamp.
        self.output_dir = config.get(
            "output_dir",
            "data/output_of_injector/{}-{}-{}-{}-{}-{}".format(self.window_size, self.k, self.pslb, self.psub, self.mode, t))

        random.seed(self.seed)

        logging.basicConfig(
            filename=self.log_file,
            format="%(asctime)s - %(levelname)s - %(message)s ",
            level=logging.INFO
        )

    def _get_subseq_by_direction_list(self, seq:list, direction_list:list, direction:str) -> list:
        """Return elements of `seq` whose paired direction equals `direction`.

        Args:
            seq: Original sequence.
            direction_list: Per-element directions (0/1), parallel to seq.
            direction: str or int, "src2dst"(0) or "dst2src"(1).

        Returns:
            Subsequence; [0] when nothing matches, so callers always
            receive a non-empty timestamp list.
        """
        if direction == "src2dst": direction = 0
        if direction == "dst2src": direction = 1

        subseq = [e for e, d in zip(seq, direction_list) if d == direction]

        if not subseq:
            subseq = [0]

        return subseq

    def _whether_to_inject(self, n:int) -> bool:
        """Decide to inject with probability n * window_size * k."""
        p = n * self.window_size * self.k
        return random.random() <= p

    def _inject_time(self, ts:float) -> float:
        """Pick an injection timestamp uniformly in [ts, ts + window_size)."""
        window_size_s = self.window_size / 1000
        return ts + (random.random() * window_size_s)

    def _inject_ps_list(self, length) -> list:
        """Generate `length` random padding packet sizes in [pslb, psub]."""
        return [random.randint(self.pslb, self.psub) for _ in range(length)]

    def _inject_one_way(self, ts_list:list) -> Tuple[list, list]:
        """Simulate a clock over one direction's timestamps and decide
        where padding packets are injected.

        Returns:
            inject_ts_list, inject_ps_list
        """
        # simulation
        step = 0.01  # simulation step (s) — the old comment wrongly said 0.001 s
        window_size_s = self.window_size / 1000
        length = len(ts_list)
        i = 0
        idle_time = 0
        cur_ts = ts_list[0]
        latest_ts = ts_list[0]
        inject_time = -1
        freeze_time = -1  # no further injection until the clock passes this
        inject_ts_list = []

        while i < length:
            # Advance past a real packet once the simulated clock reaches it.
            if ts_list[i] < cur_ts:
                latest_ts = ts_list[i]
                i += 1

            if cur_ts < freeze_time:
                cur_ts += step
                continue

            idle_time = cur_ts - latest_ts
            if idle_time >= window_size_s:
                n = idle_time // window_size_s
                do_inject = self._whether_to_inject(n)
                if do_inject:
                    inject_time = self._inject_time(cur_ts)
                    inject_ts_list.append(inject_time)
                freeze_time = cur_ts + window_size_s

            cur_ts += step

        if self.add_tail:
            # Append up to max_tail_pn padding packets after the last packet.
            tail_ts = ts_list[-1]
            n = random.randint(0, self.max_tail_pn)
            for _ in range(n):
                inject_time = self._inject_time(tail_ts)
                inject_ts_list.append(inject_time)

        inject_ps_list = self._inject_ps_list(len(inject_ts_list))
        return inject_ts_list, inject_ps_list

    def _compute_piat_list(self, ts_list:list) -> list:
        """Packet inter-arrival times in ms: first entry 0, then diffs."""
        t0 = np.array([0] + ts_list, dtype=np.float64)
        t1 = np.array(ts_list + [0], dtype=np.float64)
        t = (t1 - t0)[1:-1]
        t = np.hstack((0, t)) * 1000 # s to ms
        return t.tolist()

    def _create_flow(self, ts_list:list, ps_list:list, direction_list:list) -> dict:
        """Build a flow dict (without id), deriving piat_list from ts_list."""
        flow = {}
        flow["ts_list"] = ts_list
        flow["ps_list"] = ps_list
        flow["direction_list"] = direction_list
        flow["piat_list"] = self._compute_piat_list(ts_list)
        return flow

    def _merge_flows(self, flow1:dict, flow2:dict) -> dict:
        """Merge two flows by timestamp, preserving each packet's size and
        direction. A stable sort keeps flow1's packet first on ties.
        """
        ts_list1, ts_list2 = flow1["ts_list"], flow2["ts_list"]

        # BUGFIX: mark2 previously used len(ts_list1); because zip truncates
        # to the shorter sequence, flow2's extra packets were silently
        # dropped whenever flow2 was longer than flow1.
        tm1 = [(ts, "1") for ts in ts_list1]
        tm2 = [(ts, "2") for ts in ts_list2]

        tm = sorted(tm1 + tm2, key=lambda x: x[0])
        ts_list = [ts for ts, _ in tm]
        mark_list = [mark for _, mark in tm]

        # Re-interleave packet sizes and directions following the mark order.
        ps_list, direction_list = [], []
        ps_list1, ps_list2 = flow1["ps_list"], flow2["ps_list"]
        direction_list1, direction_list2 = flow1["direction_list"], flow2["direction_list"]
        i1, i2 = 0, 0

        for mark in mark_list:
            if mark == "1":
                ps_list.append(ps_list1[i1])
                direction_list.append(direction_list1[i1])
                i1 += 1
            else:
                ps_list.append(ps_list2[i2])
                direction_list.append(direction_list2[i2])
                i2 += 1

        merged_flow = {
            "ts_list": ts_list,
            "ps_list": ps_list,
            "direction_list": direction_list,
            "piat_list": self._compute_piat_list(ts_list),
        }

        return merged_flow

    def _build_padding_flow(self, ts_list:list, direction_list:list, direction:str, direction_code:int) -> dict:
        """Build the padding flow for one direction ("src2dst"->0, "dst2src"->1)."""
        sub_ts_list = self._get_subseq_by_direction_list(ts_list, direction_list, direction)
        inject_ts_list, inject_ps_list = self._inject_one_way(sub_ts_list)
        inject_direction_list = [direction_code for _ in range(len(inject_ts_list))]
        return self._create_flow(inject_ts_list, inject_ps_list, inject_direction_list)

    def inject(self, filepath:str) -> dict:
        """Inject padding packets into one flow.

        Max packet interarrival features contribute a lot in traffic
        classification, which means shortening interarrivals may lead to
        more misclassification.

        injected_flow_with_id structure (flow_id was omitted):

        {
            flow_id: {
                "ts_list": [],
                "ps_list": [],
                "direction_list": [],
                "piat_list": [],
                "note": {
                    "mode": "",
                    "padding_flow": {...}
                }
            }
        }

        Args:
            filepath: File path. (output of inspector)

        Returns:
            injected_flow_with_id
        """
        mode = self.mode
        padding_flow = {}

        with open(filepath, "r") as f_in:
            flow_with_id = json.load(f_in)

        flow_id = list(flow_with_id.keys())[0]
        flow = flow_with_id[flow_id]
        ts_list, direction_list = flow["ts_list"], flow["direction_list"]

        if mode == "src2dst":
            padding_flow = self._build_padding_flow(ts_list, direction_list, "src2dst", 0)
        elif mode == "dst2src":
            padding_flow = self._build_padding_flow(ts_list, direction_list, "dst2src", 1)
        elif mode == "all":
            # Keep s2d before d2s so the seeded RNG sequence is unchanged.
            s2d_padding_flow = self._build_padding_flow(ts_list, direction_list, "src2dst", 0)
            d2s_padding_flow = self._build_padding_flow(ts_list, direction_list, "dst2src", 1)
            padding_flow = self._merge_flows(s2d_padding_flow, d2s_padding_flow)

        injected_flow = self._merge_flows(flow, padding_flow)
        injected_flow["note"] = {
            "mode": mode,
            "padding_flow": padding_flow,
        }
        injected_flow_with_id = {
            flow_id: injected_flow
        }

        return injected_flow_with_id

    def create_API_dataset(self) -> str:
        """Create the Adaptive Packet Insertion dataset.

        Adaptive Packet Insertion supports 3 modes:
        - src2dst: insert packets into flow from client to server
        - dst2src: insert packets into flow from server to client
        - all:     insert packets into flows on both directions

        Although the actual direction of src2dst or dst2src depends on the
        first packet's direction, the client always sends packets first,
        which means "src2dst" indicates client to server, while "dst2src"
        means server to client.

        Returns:
            The output directory path.
        """
        logging.info("Start. output_dir: {}".format(self.output_dir))
        logging.info(
            "Important settings:  mode: {}, window_size: {}, k: {}, pslb: {}, psub: {}".\
            format(
                self.mode,
                self.window_size,
                self.k,
                self.pslb,
                self.psub
            )
        )

        import analyzer
        generate_filename = analyzer.Analyzer()._generate_filename

        directory = self.inspector_outputfiles_path
        ext = "json"
        domains = os.listdir(directory)

        for domain in tqdm(domains, ncols=50):
            domain_dir = os.path.join(directory, domain)
            files = glob.glob(os.path.join(domain_dir, "*.{}".format(ext)))

            for filepath in files:
                injected_flow = self.inject(filepath)

                output_filename = generate_filename(filepath)
                output_dir = os.path.join(self.output_dir, domain)
                output_filepath = os.path.join(output_dir, output_filename)

                os.makedirs(output_dir, exist_ok=True)

                with open(output_filepath, "w") as f_out:
                    f_out.write(json.dumps(injected_flow, indent=4))

        logging.info("Done.")

        return self.output_dir

class Inspector:
    """Post-process analyzer output: keep only the target flow and strip
    TLS handshake / TCP teardown packets."""

    def __init__(self, config:dict=None):
        """
        Args:
            config: Optional dict overriding analyzer_outputfiles_path
                and output_dir.
        """
        config = config or {}  # avoid the mutable-default-argument pitfall
        t = time.strftime("%Y%m%d%H%M%S", time.localtime())
        self.analyzer_outputfiles_path = \
            config.get("analyzer_outputfiles_path", "data/output_of_analyzer/20220319215128")
        self.output_dir = config.get("output_dir", "data/output_of_inspector/{}".format(t))

    def traverse(self, directory:str, ext:str, callback, progress_bar:bool=False, **kwargs):
        """Traverse directory, applying the callback to every matching file.

        The depth of directory is 2 and its structure looks like:

        directory
        ├── domain_1
        │   ├── 0-domain_1-time-0.ext
        │   ...
        │   └── n-domain_1-time-0.ext
        ...
        └── domain_m
            ├── 0-domain_m-time-0.ext
            ...
            └── n-domain_m-time-0.ext

        Args:
            directory: Path to directory.
            ext: Extension, like 'pcap', 'csv', etc.
            callback: Callback function, invoked as callback(filepath, **kwargs).
            progress_bar: Whether to enable the progress bar.
            **kwargs: Extra keyword arguments forwarded to callback.
        """
        domains = os.listdir(directory)

        for domain in tqdm(domains, ncols=50, disable=not progress_bar):
            domain_dir = os.path.join(directory, domain)
            # Without recursive=True, "**" behaves exactly like "*" here.
            files = glob.glob(os.path.join(domain_dir, "**.{}".format(ext)))
            for file in files:
                callback(file, **kwargs)

    def _test_traverse(self):
        # Smoke test: print every pcap path, prefixed by the extra args
        # forwarded through traverse's **kwargs.
        directory = "data/DoH_traffic_dataset_subset"

        def show(filepath:str, args):
            print(*args, filepath)

        self.traverse(
            directory=directory,
            ext="pcap",
            args=("This", "file", "is"),
            callback=show,
            progress_bar=False,
        )

    def _get_domain_by_filepath(self, filepath:str):
        """Extract the domain (second-to-last path component) from a filepath.

        Example:
            original_filename:
                "data/DoH_traffic_dataset_subset/00cha.com/0-00cha.com-20220221112513-0.pcap"
            domain: "00cha.com"

        Args:
            filepath: File path.

        Returns:
            Domain
        """
        return filepath.rsplit("/", 2)[1]

    def inspect(self, analyzer_outputfiles_path:str="default"):
        """Conduct further processing on analyzer's output.

        Analyzer extracts raw packet sequence information, like
        - timestamp sequence (ts_list)
        - packet size sequence (ps_list)
        - direction sequence (direction_list)
        - interarrival sequence (piat_list)
        from pcap files.

        This function conducts further processing on analyzer's output
        based on SFExtractor's ability. To be specific, this function:
        - removes irrelevant flows and keeps the target flow
        - removes TLS handshake packets (first 10 packets) of the target flow
        - removes TCP teardown packets (last 5 packets) of the target flow

        Args:
            analyzer_outputfiles_path: Directory of analyzer output, or
                "default" to use the configured path.
        """
        import analyzer
        import SFExtractor

        if analyzer_outputfiles_path == "default":
            analyzer_outputfiles_path = self.analyzer_outputfiles_path

        get_target_flow = SFExtractor.StatisticalFeatureExtractor().get_target_flow
        generate_filename = analyzer.Analyzer()._generate_filename

        def temp(filepath:str):
            # Per-file worker: extract the target flow and write it under
            # output_dir/<domain>/.
            flow = get_target_flow(filepath, return_flow_id=True)
            domain = self._get_domain_by_filepath(filepath)
            output_filename = generate_filename(filepath)
            output_dir = os.path.join(self.output_dir, domain)
            output_filepath = os.path.join(output_dir, output_filename)

            os.makedirs(output_dir, exist_ok=True)

            with open(output_filepath, "w") as f_out:
                f_out.write(json.dumps(flow, indent=4))

        self.traverse(
            directory=analyzer_outputfiles_path,
            ext="json",
            callback=temp,
            progress_bar=True,
        )

def test_Injector():
    """Manual smoke test: inject padding into one sample flow and print it."""
    sample_path = "data/output_of_inspector/20220323154723/baike.baidu.com/0-baike.baidu.com-20220213212330-0.json"
    print(Injector().inject(sample_path))
    
def main():
    """Command-line entry point: build the dataset requested by --type."""
    parser = argparse.ArgumentParser(description="Generate sequences for anti-analysis research.")
    parser.add_argument(
        "--type", "-t",
        type=str,
        help="sequence type, support 'inspector', 'mixer' or 'injector'"
    )
    parser.add_argument(
        "--input_dir", "-i",
        type=str,
    )
    parser.add_argument(
        "--output_dir", "-o",
        type=str,
        default="data/dataset",
        help="directory to save dataset (default 'data/dataset')"
    )
    parser.add_argument(
        "--mode", "-m",
        type=str,
        help="injector's mode, support 'src2dst', 'dst2src' or 'all'"
    )
    args = parser.parse_args()

    # Dispatch on the requested sequence type.
    if args.type == "inspector":
        inspector = Inspector({"analyzer_outputfiles_path": args.input_dir})
        inspector.inspect()
    elif args.type == "mixer":
        mixer = Mixer({"inspector_outputfiles_path": args.input_dir})
        mixer.create_TO_dataset()
    elif args.type == "injector":
        injector = Injector({
            "inspector_outputfiles_path": args.input_dir,
            "mode": args.mode,
        })
        injector.create_API_dataset()

if __name__ == "__main__":
    # Run the CLI entry point. (Removed a redundant trailing `pass`.)
    main()

# python sequence.py -t inspector 
# python sequence.py -t mixer -i data/output_of_inspector/20220323154723
# python sequence.py -t injector -i data/output_of_inspector/20220323154723 -m all