{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# encoding=utf-8\n",
    "import os.path as osp\n",
    "import os\n",
    "import copy\n",
    "import matplotlib.pyplot as plt\n",
    "import torch\n",
    "from torch.nn import Linear\n",
    "from sklearn.metrics import average_precision_score, roc_auc_score\n",
    "from torch_geometric.data import TemporalData\n",
    "\n",
    "from torch_geometric.nn import TGNMemory, TransformerConv\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torch_geometric.nn.models.tgn import (LastNeighborLoader, IdentityMessage, MeanAggregator,\n",
    "                                           LastAggregator)\n",
    "from torch_geometric import *\n",
    "from torch_geometric.utils import negative_sampling\n",
    "\n",
    "from tqdm import tqdm\n",
    "# from .autonotebook import tqdm as notebook_tqdm\n",
    "\n",
    "import networkx as nx\n",
    "import numpy as np\n",
    "import math\n",
    "import copy\n",
    "import re\n",
    "import time\n",
    "import json\n",
    "import pandas as pd\n",
    "from random import choice\n",
    "import gc\n",
    "from graphviz import Digraph\n",
    "import xxhash\n",
    "\n",
    "from datetime import datetime, timezone\n",
    "import time\n",
    "import pytz\n",
    "from time import mktime\n",
    "from datetime import datetime\n",
    "import time\n",
    "\n",
    "\n",
    "from rich.progress import Progress\n",
    "from rich.progress import (\n",
    "    BarColumn,\n",
    "    DownloadColumn,\n",
    "    Progress,\n",
    "    SpinnerColumn,\n",
    "    TaskProgressColumn,\n",
    "    TimeElapsedColumn,\n",
    "    TimeRemainingColumn,\n",
    ")\n",
    "\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "\n",
    "\n",
    "def hashgen(l):\n",
    "    \"\"\"Generate a single hash value from a list. @l is a list of\n",
    "    string values, which can be properties of a node/edge. This\n",
    "    function returns a single hashed integer value.\"\"\"\n",
    "    hasher = xxhash.xxh64()\n",
    "    for e in l:\n",
    "        hasher.update(e)\n",
    "    return hasher.intdigest()\n",
    "\n",
    "\n",
    "def datetime_to_ns_time(date):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: nano timestamp\n",
    "    \"\"\"\n",
    "    date,ns=date.split('.')\n",
    "\n",
    "    timeArray = time.strptime(date, '%Y-%m-%dT%H:%M:%S')\n",
    "    timeStamp = int(time.mktime(timeArray))\n",
    "    timeStamp = timeStamp * 1000000000\n",
    "    timeStamp += int(ns.split('Z')[0])\n",
    "    return timeStamp\n",
    "\n",
    "\n",
    "def datetime_to_timestamp_US(date):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: nano timestamp\n",
    "    \"\"\"\n",
    "    date=date.replace('-04:00','')\n",
    "    if '.' in date:\n",
    "        date,ms=date.split('.')\n",
    "    else:\n",
    "        ms=0\n",
    "    tz = pytz.timezone('Etc/GMT+4')\n",
    "    timeArray = time.strptime(date, \"%Y-%m-%dT%H:%M:%S\")\n",
    "    dt = datetime.fromtimestamp(mktime(timeArray))\n",
    "    timestamp = tz.localize(dt)\n",
    "    timestamp=timestamp.timestamp()\n",
    "    timeStamp = timestamp*1000+int(ms)\n",
    "    return int(timeStamp)\n",
    "\n",
    "\n",
    "def timestamp_to_datetime_US(ns):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: nano timestamp\n",
    "    \"\"\"\n",
    "    tz = pytz.timezone('US/Eastern')\n",
    "    ms=ns%1000\n",
    "    ns/=1000\n",
    "    dt = pytz.datetime.datetime.fromtimestamp(int(ns), tz)\n",
    "    s = dt.strftime('%Y-%m-%d %H:%M:%S')\n",
    "    s+='.'+str(ms)\n",
    "#     s += '.' + str(int(int(ns) % 1000000000)).zfill(9)\n",
    "    return s\n",
    "\n",
    "pid_split_symble=\"#_\"\n",
    "\n",
    "host_split_symble=\"_@\"\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Database setting (Make sure the database and tables are created)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "import psycopg2\n",
    "\n",
    "from psycopg2 import extras as ex\n",
    "connect = psycopg2.connect(database = 'optc_db',\n",
    "                           host = '/var/run/postgresql/',\n",
    "                           user = 'postgres',\n",
    "                           password = 'postgres',\n",
    "                           port = '5432'\n",
    "                          )\n",
    "\n",
    "cur = connect.cursor()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Clear all data in the database. Run it carefully!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 161,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "None\n"
     ]
    }
   ],
   "source": [
    "tt=cur.execute(\"\"\"\n",
    "    delete from event_table where 1=1;\n",
    "\"\"\")\n",
    "print(tt)\n",
    "connect.commit()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 162,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "None\n"
     ]
    }
   ],
   "source": [
    "tt=cur.execute(\"\"\"\n",
    "    delete from nodeid2msg where 1=1;\n",
    "\"\"\")\n",
    "print(tt)\n",
    "connect.commit()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Parse data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "reverse_edge_type=[\n",
    "    \"READ\",\n",
    "]\n",
    "\n",
    "\n",
    "\n",
    "node_type_used=[\n",
    "    'FILE',\n",
    " 'FLOW',\n",
    " 'PROCESS',\n",
    "#  'SHELL',\n",
    "]\n",
    "\n",
    "def process_raw_dic(raw_dic):\n",
    "    ans_dic={}\n",
    "    \n",
    "    \n",
    "    ans_dic['hostname']=raw_dic['hostname'].split('.')[0]\n",
    "    \n",
    "    ans_dic['edge_type']=raw_dic['action']\n",
    "    ans_dic['src_id']=raw_dic['actorID']\n",
    "    ans_dic['dst_id']=raw_dic['objectID']\n",
    "    \n",
    "    ans_dic['src_type']='PROCESS'\n",
    "    ans_dic['timestamp']=datetime_to_timestamp_US(raw_dic['timestamp'])\n",
    "    ans_dic['dst_type']=raw_dic['object']\n",
    "    \n",
    "    try:\n",
    "        node_uuid2path[ans_dic['src_id']]=ans_dic['hostname']+host_split_symble+raw_dic['properties']['image_path']  \n",
    "        \n",
    "    \n",
    "        if raw_dic['object']=='FLOW':\n",
    "            temp_flow=f\"{raw_dic['properties']['direction']}#{raw_dic['properties']['src_ip']}:{raw_dic['properties']['src_port']}->{raw_dic['properties']['dest_ip']}:{raw_dic['properties']['dest_port']}\"\n",
    "            node_uuid2path[ans_dic['dst_id']]=ans_dic['hostname']+host_split_symble+temp_flow\n",
    "\n",
    "        if raw_dic['object']=='FILE':              \n",
    "            node_uuid2path[ans_dic['dst_id']]=ans_dic['hostname']+host_split_symble+raw_dic['properties']['file_path']\n",
    "\n",
    "\n",
    "    except:\n",
    "        ans_dic={}\n",
    "    \n",
    "    return ans_dic"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'FILE',\n",
       " 'FLOW',\n",
       " 'MODULE',\n",
       " 'PROCESS',\n",
       " 'REGISTRY',\n",
       " 'SHELL',\n",
       " 'TASK',\n",
       " 'THREAD',\n",
       " 'USER_SESSION'}"
      ]
     },
     "execution_count": 58,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "node_type={'FILE',\n",
    " 'FLOW',\n",
    " 'MODULE',\n",
    " 'PROCESS',\n",
    " 'REGISTRY',\n",
    " 'SHELL',\n",
    " 'TASK',\n",
    " 'THREAD',\n",
    " 'USER_SESSION'}"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Unzip data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 51,
   "metadata": {},
   "outputs": [],
   "source": [
    "from os import walk\n",
    " \n",
    "# folder path\n",
    "dir_path = '/home/monk/datasets/OpTC_data/'\n",
    " \n",
    "# list to store files name\n",
    "res = []\n",
    "for (dir_path, dir_names, file_names) in walk(dir_path):\n",
    "    if dir_path[-1]!='/':\n",
    "        dir_path+='/'\n",
    "#     print(f\"{dir_path=}\")\n",
    "#     print(f\"{file_names=}\")\n",
    "    for f in file_names:\n",
    "        temp_file_path=dir_path+f\n",
    "#         print(f\"{temp_file_path=}\")\n",
    "     \n",
    "        res.append(temp_file_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 84%|████████▍ | 118/140 [00:09<00:01, 12.58it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " /home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-401-425/AIA-401-425.ecar-last.json.gz Finished！\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 85%|████████▌ | 119/140 [08:00<01:59,  5.71s/it]gzip: /home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-51-75/AIA-51-75.ecar-last.json.gz: No such file or directory\n",
      "100%|██████████| 140/140 [08:00<00:00,  3.43s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      " /home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-401-425/AIA-401-425.ecar-2019-12-08T04-06-31.326.json.gz Finished！\n",
      " /home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-51-75/AIA-51-75.ecar-last.json.gz Finished！\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "for r in tqdm(res):\n",
    "    if (\"201-225\" in r or \"401-425\" in r or \"651-675\" in r or \"501-525\" in r or \"51-75\" in r) and \".gz\" in r:\n",
    "        os.system(f\"gzip -d {r}\")\n",
    "        print(f\" {r} Finished！\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Process the features of nodes and edges"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Edge features"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# edge types\n",
    "edge_set=['OPEN',\n",
    "'READ',\n",
    "'CREATE',\n",
    "'MESSAGE',\n",
    "'MODIFY',\n",
    "'START',\n",
    "'RENAME',\n",
    "'DELETE',\n",
    "'TERMINATE',\n",
    "'WRITE',]\n",
    "\n",
    "# Generate edge type one-hot\n",
    "edgevec=torch.nn.functional.one_hot(torch.arange(0, len(edge_set)), num_classes=len(edge_set))\n",
    "\n",
    "\n",
    "edge2vec={}\n",
    "for e in range(len(edge_set)):\n",
    "    edge2vec[edge_set[e]]=edgevec[e]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'OPEN': tensor([1, 0, 0, 0, 0, 0, 0, 0, 0, 0]),\n",
       " 'READ': tensor([0, 1, 0, 0, 0, 0, 0, 0, 0, 0]),\n",
       " 'CREATE': tensor([0, 0, 1, 0, 0, 0, 0, 0, 0, 0]),\n",
       " 'MESSAGE': tensor([0, 0, 0, 1, 0, 0, 0, 0, 0, 0]),\n",
       " 'MODIFY': tensor([0, 0, 0, 0, 1, 0, 0, 0, 0, 0]),\n",
       " 'START': tensor([0, 0, 0, 0, 0, 1, 0, 0, 0, 0]),\n",
       " 'RENAME': tensor([0, 0, 0, 0, 0, 0, 1, 0, 0, 0]),\n",
       " 'DELETE': tensor([0, 0, 0, 0, 0, 0, 0, 1, 0, 0]),\n",
       " 'TERMINATE': tensor([0, 0, 0, 0, 0, 0, 0, 0, 1, 0]),\n",
       " 'WRITE': tensor([0, 0, 0, 0, 0, 0, 0, 0, 0, 1])}"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "edge2vec"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "rel2id={}\n",
    "index=1\n",
    "for i in edge_set:\n",
    "    rel2id[index]=i\n",
    "    rel2id[i]=index\n",
    "    index+=1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{1: 'OPEN',\n",
       " 'OPEN': 1,\n",
       " 2: 'READ',\n",
       " 'READ': 2,\n",
       " 3: 'CREATE',\n",
       " 'CREATE': 3,\n",
       " 4: 'MESSAGE',\n",
       " 'MESSAGE': 4,\n",
       " 5: 'MODIFY',\n",
       " 'MODIFY': 5,\n",
       " 6: 'START',\n",
       " 'START': 6,\n",
       " 7: 'RENAME',\n",
       " 'RENAME': 7,\n",
       " 8: 'DELETE',\n",
       " 'DELETE': 8,\n",
       " 9: 'TERMINATE',\n",
       " 'TERMINATE': 9,\n",
       " 10: 'WRITE',\n",
       " 'WRITE': 10}"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rel2id"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Node features"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.feature_extraction import FeatureHasher\n",
    "from torch_geometric.transforms import NormalizeFeatures\n",
    "\n",
    "from sklearn import preprocessing\n",
    "import numpy as np\n",
    "\n",
    "\n",
    "encode_len=16\n",
    "\n",
    "FH_string=FeatureHasher(n_features=encode_len,input_type=\"string\")\n",
    "FH_dict=FeatureHasher(n_features=encode_len,input_type=\"dict\")\n",
    "\n",
    "\n",
    "def path2higlist(p):\n",
    "    l=[]\n",
    "    spl=p.strip().split('/')\n",
    "    for i in spl:\n",
    "        if len(l)!=0:\n",
    "            l.append(l[-1]+'/'+i)\n",
    "        else:\n",
    "            l.append(i)\n",
    "#     print(l)\n",
    "    return l\n",
    "\n",
    "def ip2higlist(p):\n",
    "    l=[]\n",
    "    if \"::\" not in p:\n",
    "        spl=p.strip().split('.')\n",
    "        for i in spl:\n",
    "            if len(l)!=0:\n",
    "                l.append(l[-1]+'.'+i)\n",
    "            else:\n",
    "                l.append(i)\n",
    "    #     print(l)\n",
    "        return l\n",
    "    else:\n",
    "        spl=p.strip().split(':')\n",
    "        for i in spl:\n",
    "            if len(l)!=0:\n",
    "                l.append(l[-1]+':'+i)\n",
    "            else:\n",
    "                l.append(i)\n",
    "    #     print(l)\n",
    "        return l\n",
    "def list2str(l):\n",
    "    s=''\n",
    "    for i in l:\n",
    "        s+=i\n",
    "    return s\n",
    "\n",
    "def str2tensor(msg_type,msg):\n",
    "    if msg_type == 'FLOW':\n",
    "        h_msg=list2str(ip2higlist(msg))\n",
    "    else:\n",
    "        h_msg=list2str(path2higlist(msg))\n",
    "    vec=FH_string.transform([msg_type+h_msg]).toarray()\n",
    "    vec=torch.tensor(vec).reshape(encode_len).float()\n",
    "#     print(h_msg)\n",
    "    return vec\n",
    "\n",
    "\n",
    "class TimeEncoder(torch.nn.Module):\n",
    "    def __init__(self, out_channels):\n",
    "        super().__init__()\n",
    "        self.out_channels = out_channels\n",
    "        self.lin = Linear(1, out_channels)\n",
    "\n",
    "    def reset_parameters(self):\n",
    "        self.lin.reset_parameters()\n",
    "\n",
    "    def forward(self, t):\n",
    "        return self.lin(t.view(-1, 1)).cos()\n",
    "    \n",
    "time_enc=TimeEncoder(50)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Store the benign data to database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 141,
   "metadata": {},
   "outputs": [],
   "source": [
    "node_uuid2path={}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 163,
   "metadata": {},
   "outputs": [],
   "source": [
    "from os import walk\n",
    " \n",
    "# folder path\n",
    "dir_path = '/home/monk/datasets/OpTC_data/ecar/benign/'\n",
    "\n",
    "res = []\n",
    "for (dir_path, dir_names, file_names) in walk(dir_path):\n",
    "    if dir_path[-1]!='/':\n",
    "        dir_path+='/'\n",
    "#     print(f\"{dir_path=}\")\n",
    "#     print(f\"{file_names=}\")\n",
    "    for f in file_names:\n",
    "        temp_file_path=dir_path+f\n",
    "#         print(f\"{temp_file_path=}\")\n",
    "        if \"201-225\" in temp_file_path or (\"20-23Sep19\" in temp_file_path and (\"401-425\" in temp_file_path or \"651-675\" in temp_file_path or \"501-525\" in temp_file_path or \"51-75\" in temp_file_path)):\n",
    "            res.append(temp_file_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 165,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 36/36 [00:00<00:00, 217885.92it/s]\n"
     ]
    }
   ],
   "source": [
    "for r in tqdm(res):\n",
    "    if  \".gz\" in r:\n",
    "        os.system(f\"gzip -d {r}\")\n",
    "        print(f\" {r} Finished！\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 166,
   "metadata": {},
   "outputs": [],
   "source": [
    "def is_selected_hosts(line):\n",
    "    hosts=[\n",
    "        'SysClient0201',\n",
    "        'SysClient0402',\n",
    "        'SysClient0660',\n",
    "        'SysClient0501',\n",
    "        'SysClient0051',        \n",
    "        'SysClient0209',\n",
    "    ]\n",
    "    flag=False\n",
    "    for h in hosts:\n",
    "        if h in line:\n",
    "            flag=True\n",
    "            break\n",
    "    return flag"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 167,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "6190907it [00:52, 117864.83it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=403686\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/19Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T16-16-05.667.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "23189497it [03:19, 116178.24it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1744136\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/19Sep19/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "75798918it [11:06, 113647.49it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=6295186\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/17-18Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T06-00-00.251.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "43174191it [06:17, 114429.39it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3393220\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/17-18Sep19/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "75743706it [10:47, 116912.71it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=5575885\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/17-18Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T01-57-49.366.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76469610it [11:02, 115448.03it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=5944960\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-08T05-46-21.658.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "74706481it [10:50, 114796.44it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=5727218\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76495083it [11:08, 114409.13it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=5879664\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-08T01-57-30.012.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76648515it [10:53, 117366.10it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=5027745\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T22-06-33.589.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "53321795it [08:14, 107837.15it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=4793249\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T19-16-05.788.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76389825it [09:59, 127440.89it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2990117\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-07T12-19-23.521.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76383303it [10:05, 126104.39it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3024043\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-07T20-18-48.097.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "21076091it [02:40, 131219.36it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=480749\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-07T06-28-53.370.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "77020972it [10:14, 125295.72it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3421514\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-07T08-33-35.028.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "53285475it [06:58, 127359.29it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2101814\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76407480it [10:01, 126927.69it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3002918\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-07T16-09-39.085.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76699983it [10:06, 126554.89it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2957067\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-15T09-43-35.856.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "20541672it [02:41, 127072.38it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=785228\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76719146it [10:06, 126492.48it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2876893\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-15T13-29-59.064.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "78048775it [10:34, 122970.27it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=4039788\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-15T05-59-37.208.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76706216it [10:09, 125822.22it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2965137\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-15T17-22-42.923.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "36440311it [04:38, 130781.02it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=843401\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-15T03-10-00.546.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "27870737it [03:36, 129799.40it/s]IOPub message rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_msg_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n",
      "32029474it [04:09, 128412.38it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1099540\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "67353196it [08:46, 128610.01it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-2019-11-15T09-37-46.741.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "36333460it [04:42, 128817.97it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1022401\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-2019-11-15T03-09-38.187.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "4928320it [00:39, 131010.08it/s]IOPub message rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_msg_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n",
      "77786670it [10:14, 126661.46it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3133116\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-2019-11-15T05-48-17.579.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76458599it [09:57, 127872.31it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2610952\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-2019-11-15T13-28-16.876.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76425447it [09:57, 127944.05it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2617847\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-651-675/AIA-651-675.ecar-2019-11-15T17-26-42.298.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "42840935it [05:37, 126794.11it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1616861\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "25224989it [03:10, 132316.13it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=432115\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-2019-12-07T16-15-43.163.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76638913it [10:20, 123463.28it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3417179\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-2019-12-07T18-18-31.331.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76421197it [10:02, 126783.69it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2824549\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-2019-12-08T04-30-36.852.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76427896it [10:08, 125601.25it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2892230\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-2019-12-07T21-31-30.259.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76404717it [10:04, 126461.60it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2919561\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/20-23Sep19/AIA-51-75/AIA-51-75.ecar-2019-12-08T00-56-58.175.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "32326513it [04:44, 113775.54it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2449767\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/18-19Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-07T10-37-17.942.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "69051137it [10:04, 114210.92it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=4978166\n",
      "/home/monk/datasets/OpTC_data/ecar/benign/18-19Sep19/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    }
   ],
   "source": [
    "# Ingest the benign eCAR log files listed in `res`: parse each JSON line,\n",
    "# keep events whose object type and hostname are selected, and bulk-insert\n",
    "# the resulting edges into `event_table` with the label \"benign\".\n",
    "for file_path in res:\n",
    "    \n",
    "    edge_list=[]\n",
    "\n",
    "    with open(file_path) as f:\n",
    "        for line in tqdm(f):\n",
    "            # Normalize escaped Windows path separators before JSON parsing.\n",
    "            line=line.replace('\\\\\\\\','/')\n",
    "            temp_dic=json.loads(line.strip())\n",
    "            # Hostname without its domain suffix, e.g. 'SysClient0402'.\n",
    "            hostname=temp_dic['hostname'].split('.')[0]\n",
    "            if temp_dic['object'] in node_type_used and is_selected_hosts(hostname):\n",
    "                edge_list.append(process_raw_dic(temp_dic))\n",
    "    \n",
    "        print(f'{len(edge_list)=}')\n",
    "        # Flatten each parsed event into the event_table column order:\n",
    "        # (src_id, src_type, edge_type, dst_id, dst_type, hostname, timestamp, label).\n",
    "        data_list=[]\n",
    "        for e in edge_list:\n",
    "            try:\n",
    "                data_list.append([\n",
    "                    e['src_id'],\n",
    "                    e['src_type'],\n",
    "                    e['edge_type'],\n",
    "                    e['dst_id'],\n",
    "                    e['dst_type'],\n",
    "                    e['hostname'],\n",
    "                    e['timestamp'],\n",
    "                    \"benign\",\n",
    "                ])\n",
    "            except:\n",
    "                # NOTE(review): bare except silently drops events missing any\n",
    "                # expected key — consider counting/logging the skipped rows.\n",
    "                pass\n",
    "\n",
    "        # write to database\n",
    "        sql = '''insert into event_table\n",
    "                             values %s\n",
    "                '''\n",
    "        ex.execute_values(cur,sql, data_list,page_size=10000)\n",
    "        connect.commit()\n",
    "        \n",
    "        print(f\"{file_path} Finished! \")\n",
    "        # Clear the tmp variables to release the memory.\n",
    "        del edge_list\n",
    "        del data_list"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Store the evaluation data to the database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 168,
   "metadata": {},
   "outputs": [],
   "source": [
    "from os import walk\n",
    " \n",
    "# folder path\n",
    "dir_path = '/home/monk/datasets/OpTC_data/ecar/evaluation/'\n",
    "\n",
    "res = []\n",
    "for (dir_path, dir_names, file_names) in walk(dir_path):\n",
    "    if dir_path[-1]!='/':\n",
    "        dir_path+='/'\n",
    "    for f in file_names:\n",
    "        temp_file_path=dir_path+f\n",
    "#         print(f\"{temp_file_path=}\")\n",
    "        if (\"201-225\" in temp_file_path or \"401-425\" in temp_file_path or \"651-675\" in temp_file_path or \"501-525\" in temp_file_path or \"51-75\" in temp_file_path):\n",
    "            res.append(temp_file_path)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 170,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 35/35 [00:00<00:00, 231546.75it/s]\n"
     ]
    }
   ],
   "source": [
    "import shlex\n",
    "\n",
    "# Decompress any remaining .gz archives in place before parsing.\n",
    "for r in tqdm(res):\n",
    "    # endswith() avoids matching '.gz' in the middle of a name (the old\n",
    "    # substring test would re-run gzip -d on e.g. 'x.gz.json' and fail).\n",
    "    if r.endswith(\".gz\"):\n",
    "        # Quote the path so spaces/shell metacharacters cannot break the command.\n",
    "        os.system(f\"gzip -d {shlex.quote(r)}\")\n",
    "        print(f\" {r} Finished！\")"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 171,
   "metadata": {},
   "outputs": [],
   "source": [
    "def is_selected_hosts(line):\n",
    "    hosts=[\n",
    "        'SysClient0201',\n",
    "        'SysClient0402',\n",
    "        'SysClient0660',\n",
    "        'SysClient0501',\n",
    "        'SysClient0051',        \n",
    "        'SysClient0207',\n",
    "    ]\n",
    "    flag=False\n",
    "    for h in hosts:\n",
    "        if h in line:\n",
    "            flag=True\n",
    "            break\n",
    "    return flag"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 172,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "27031942it [03:56, 114332.80it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1839580\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "27461034it [03:38, 125861.01it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1049439\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-401-425/AIA-401-425.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "13921814it [01:51, 124427.07it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=586663\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-501-525/AIA-501-525.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "10781277it [01:28, 122133.90it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=493144\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-501-525/AIA-501-525.ecar-2019-11-17T15-04-02.073.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "24895771it [03:18, 125135.41it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1035074\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-651-675/AIA-651-675.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "333939it [00:02, 122523.21it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=11430\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-651-675/AIA-651-675.ecar-2019-11-17T14-50-25.754.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "25186287it [03:20, 125324.71it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1053016\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/25Sept/AIA-51-75/AIA-51-75.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "46514809it [06:44, 115128.05it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3497340\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-201-225/AIA-201-225.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "49333875it [07:04, 116215.81it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=3544607\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-201-225/AIA-201-225.ecar-2019-12-08T17-41-18.327.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "73190980it [09:43, 125398.84it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2825428\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-401-425/AIA-401-425.ecar-2019-12-08T07-35-11.579.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "23196562it [03:03, 126291.00it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=878620\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-401-425/AIA-401-425.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "30125538it [04:00, 125077.08it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1274143\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-501-525/AIA-501-525.ecar-2019-11-17T04-01-58.625.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "65758362it [08:39, 126598.40it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2554194\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-501-525/AIA-501-525.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "76274003it [09:59, 127294.77it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2829980\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/24Sep19/AIA-651-675/AIA-651-675.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "16626832it [02:09, 125717.07it/s]IOPub message rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_msg_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n",
      "32570231it [04:15, 127691.42it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=1194622\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/23Sep19-red/AIA-501-525/AIA-501-525.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "9446302it [01:15, 139405.96it/s]IOPub message rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_msg_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n",
      "66585106it [08:43, 127177.30it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2522019\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-401-425/AIA-401-425.ecar-2019-12-08T04-06-31.326.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "3341987it [00:27, 123741.03it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=125517\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-401-425/AIA-401-425.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "13193899it [01:44, 129702.70it/s]IOPub message rate exceeded.\n",
      "The notebook server will temporarily stop sending output\n",
      "to the client in order to avoid crashing it.\n",
      "To change this limit, set the config variable\n",
      "`--NotebookApp.iopub_msg_rate_limit`.\n",
      "\n",
      "Current values:\n",
      "NotebookApp.iopub_msg_rate_limit=1000.0 (msgs/sec)\n",
      "NotebookApp.rate_limit_window=3.0 (secs)\n",
      "\n",
      "69126143it [09:13, 124996.24it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=2654927\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-51-75/AIA-51-75.ecar-last.json Finished!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "943962it [00:07, 130731.27it/s]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(edge_list)=14520\n",
      "/home/monk/datasets/OpTC_data/ecar/evaluation/23Sep-night/AIA-51-75/AIA-51-75.ecar-2019-12-08T10-19-52.584.json Finished!\n"
     ]
    }
   ],
   "source": [
    "# Ingest the evaluation eCAR log files listed in `res`: parse each JSON\n",
    "# line, keep events whose object type and hostname are selected, and\n",
    "# bulk-insert the resulting edges into `event_table` (label: \"evaluation\").\n",
    "for file_path in res:\n",
    "    \n",
    "    edge_list=[]\n",
    "\n",
    "    with open(file_path) as f:\n",
    "        for line in tqdm(f):\n",
    "            # Normalize escaped Windows path separators before JSON parsing.\n",
    "            line=line.replace('\\\\\\\\','/')\n",
    "            temp_dic=json.loads(line.strip())\n",
    "            # Hostname without its domain suffix, e.g. 'SysClient0402'.\n",
    "            hostname=temp_dic['hostname'].split('.')[0]\n",
    "            if temp_dic['object'] in node_type_used and is_selected_hosts(hostname):\n",
    "                edge_list.append(process_raw_dic(temp_dic))\n",
    "    \n",
    "        print(f'{len(edge_list)=}')\n",
    "        # Flatten each parsed event into the event_table column order:\n",
    "        # (src_id, src_type, edge_type, dst_id, dst_type, hostname, timestamp, label).\n",
    "        data_list=[]\n",
    "        for e in edge_list:\n",
    "            try:\n",
    "                data_list.append([\n",
    "                    e['src_id'],\n",
    "                    e['src_type'],\n",
    "                    e['edge_type'],\n",
    "                    e['dst_id'],\n",
    "                    e['dst_type'],\n",
    "                    e['hostname'],\n",
    "                    e['timestamp'],\n",
    "                    \"evaluation\",\n",
    "                ])\n",
    "            except:\n",
    "                # NOTE(review): bare except silently drops events missing any\n",
    "                # expected key — consider counting/logging the skipped rows.\n",
    "                pass\n",
    "\n",
    "        sql = '''insert into event_table\n",
    "                             values %s\n",
    "                '''\n",
    "        ex.execute_values(cur,sql, data_list,page_size=10000)\n",
    "        connect.commit()\n",
    "        \n",
    "        print(f\"{file_path} Finished! \")\n",
    "        # Clear the tmp variables to release the memory.\n",
    "        del edge_list\n",
    "        del data_list"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Store the node data into the database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 173,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Persist the node uuid -> msg (path string) map into the nodeid2msg table.\n",
    "data_list=[]\n",
    "for n in node_uuid2path:\n",
    "    try:\n",
    "        data_list.append([\n",
    "            n,\n",
    "             node_uuid2path[n]\n",
    "        ])\n",
    "    except:\n",
    "        # NOTE(review): looking up an existing key of a dict being iterated\n",
    "        # cannot fail, so this bare except should never trigger.\n",
    "        pass\n",
    "    \n",
    "\n",
    "sql = '''insert into nodeid2msg\n",
    "                     values %s\n",
    "        '''\n",
    "ex.execute_values(cur,sql, data_list,page_size=10000)\n",
    "connect.commit()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 174,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "18965643"
      ]
     },
     "execution_count": 174,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(node_uuid2path)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Load node data from database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 18965643/18965643 [00:15<00:00, 1238032.13it/s]\n"
     ]
    }
   ],
   "source": [
    "# Rebuild the in-memory nodeid -> msg lookup table from nodeid2msg.\n",
    "sql=\"select * from nodeid2msg;\"\n",
    "cur.execute(sql)\n",
    "rows = cur.fetchall()\n",
    "\n",
    "# Each row is a (nodeid, msg) pair; tqdm shows progress over the ~19M rows.\n",
    "node_uuid2path = {row[0]: row[1] for row in tqdm(rows)}"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Generate the benign datasets"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h402  22"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=4410529\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [20:03<00:00, 1203.10s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build the benign TemporalData dataset for host SysClient0402, day 2019-09-22.\n",
    "# NOTE(review): this cell is copy-pasted below for other hosts/days with only\n",
    "# `hostname` and the day range changed — a shared function would prevent drift.\n",
    "for day in tqdm(range(22,23)):\n",
    "    # Day boundaries (US timezone) converted to epoch timestamps.\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0402'\n",
    "    datalabel='benign'\n",
    "    # Interpolated values are internal constants; if they ever came from\n",
    "    # outside, this f-string SQL would need to be parameterized.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "    \n",
    "    \n",
    "    \n",
    "    # Assign a dense integer index to every node with a recorded msg.\n",
    "    # node_uuid2index doubles as uuid -> index and index -> node msg.\n",
    "    # NOTE(review): node_set appears unused in this cell.\n",
    "    node_set=set()\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # Event columns: e[0]=src_id, e[3]=dst_id (see the ingestion cells).\n",
    "        if e[3] not in node_uuid2path or e[0]  not in node_uuid2path:\n",
    "            continue\n",
    "\n",
    "        if e[0] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "\n",
    "        if e[3] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1 \n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "       \n",
    "\n",
    "    # Build the edge stream: src/dst indices, per-edge feature vector, timestamp.\n",
    "    dataset = TemporalData()\n",
    "    src = []\n",
    "    dst = []\n",
    "    msg = []\n",
    "    t = []\n",
    "    for e in (events):\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            # If the image path of the node is not recorded, then skip this edge\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "        #     msg.append(torch.cat([torch.from_numpy(node2higvec_bn[i[0]]), rel2vec[i[2]], torch.from_numpy(node2higvec_bn[i[1]])] ))\n",
    "\n",
    "            # Edge feature = src-node vector + edge-type vector + dst-node\n",
    "            # vector (str2tensor and edge2vec are defined elsewhere).\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]), \n",
    "                                  edge2vec[e[2]], \n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src)\n",
    "    dataset.dst = torch.tensor(dst)\n",
    "    dataset.t = torch.tensor(t)\n",
    "    dataset.msg = torch.vstack(msg)\n",
    "    dataset.src = dataset.src.to(torch.long)\n",
    "    dataset.dst = dataset.dst.to(torch.long)\n",
    "    dataset.msg = dataset.msg.to(torch.float)\n",
    "    dataset.t = dataset.t.to(torch.long)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")  \n",
    "    "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h660 22"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3889699\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [16:36<00:00, 996.10s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build the benign TemporalData dataset for host SysClient0660, day 2019-09-22.\n",
    "# NOTE(review): duplicate of the SysClient0402 cell above except for `hostname`.\n",
    "for day in tqdm(range(22,23)):\n",
    "    # Day boundaries (US timezone) converted to epoch timestamps.\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0660'\n",
    "    datalabel='benign'\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "    \n",
    "    \n",
    "    \n",
    "    # Dense node indexing; node_uuid2index maps uuid -> index and index -> msg.\n",
    "    node_set=set()\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # Skip edges whose endpoints have no recorded msg.\n",
    "        if e[3] not in node_uuid2path or e[0]  not in node_uuid2path:\n",
    "            continue\n",
    "\n",
    "        if e[0] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "\n",
    "        if e[3] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1 \n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "       \n",
    "\n",
    "    # Build the edge stream: src/dst indices, per-edge feature vector, timestamp.\n",
    "    dataset = TemporalData()\n",
    "    src = []\n",
    "    dst = []\n",
    "    msg = []\n",
    "    t = []\n",
    "    for e in (events):\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            # If the image path of the node is not recorded, then skip this edge\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "        #     msg.append(torch.cat([torch.from_numpy(node2higvec_bn[i[0]]), rel2vec[i[2]], torch.from_numpy(node2higvec_bn[i[1]])] ))\n",
    "\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]), \n",
    "                                  edge2vec[e[2]], \n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src)\n",
    "    dataset.dst = torch.tensor(dst)\n",
    "    dataset.t = torch.tensor(t)\n",
    "    dataset.msg = torch.vstack(msg)\n",
    "    dataset.src = dataset.src.to(torch.long)\n",
    "    dataset.dst = dataset.dst.to(torch.long)\n",
    "    dataset.msg = dataset.msg.to(torch.float)\n",
    "    dataset.t = dataset.t.to(torch.long)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")  \n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h501 21"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=4337416\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "  0%|          | 0/1 [00:25<?, ?it/s]\n"
     ]
    },
    {
     "ename": "NameError",
     "evalue": "name 'str2tensor' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mNameError\u001B[0m                                 Traceback (most recent call last)",
      "Input \u001B[0;32mIn [7]\u001B[0m, in \u001B[0;36m<cell line: 1>\u001B[0;34m()\u001B[0m\n\u001B[1;32m     51\u001B[0m         dst\u001B[38;5;241m.\u001B[39mappend(node_uuid2index[e[\u001B[38;5;241m3\u001B[39m]])\n\u001B[1;32m     52\u001B[0m     \u001B[38;5;66;03m#     msg.append(torch.cat([torch.from_numpy(node2higvec_bn[i[0]]), rel2vec[i[2]], torch.from_numpy(node2higvec_bn[i[1]])] ))\u001B[39;00m\n\u001B[0;32m---> 54\u001B[0m         msg\u001B[38;5;241m.\u001B[39mappend(torch\u001B[38;5;241m.\u001B[39mcat([\u001B[43mstr2tensor\u001B[49m(e[\u001B[38;5;241m1\u001B[39m],node_uuid2path[e[\u001B[38;5;241m0\u001B[39m]]), \n\u001B[1;32m     55\u001B[0m                               edge2vec[e[\u001B[38;5;241m2\u001B[39m]], \n\u001B[1;32m     56\u001B[0m                               str2tensor(e[\u001B[38;5;241m4\u001B[39m],node_uuid2path[e[\u001B[38;5;241m3\u001B[39m]])\n\u001B[1;32m     57\u001B[0m                              ]))\n\u001B[1;32m     58\u001B[0m         t\u001B[38;5;241m.\u001B[39mappend(\u001B[38;5;28mint\u001B[39m(e[\u001B[38;5;241m6\u001B[39m]))\n\u001B[1;32m     60\u001B[0m dataset\u001B[38;5;241m.\u001B[39msrc \u001B[38;5;241m=\u001B[39m torch\u001B[38;5;241m.\u001B[39mtensor(src)\n",
      "\u001B[0;31mNameError\u001B[0m: name 'str2tensor' is not defined"
     ]
    }
   ],
   "source": [
    "# Build the benign TemporalData dataset for host SysClient0501, day 2019-09-21.\n",
    "# NOTE(review): the saved output shows this run failed with\n",
    "# NameError: 'str2tensor' is not defined — run the cell defining str2tensor\n",
    "# (and edge2vec) before this one.\n",
    "for day in tqdm(range(21,22)):\n",
    "    # Day boundaries (US timezone) converted to epoch timestamps.\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0501'\n",
    "    datalabel='benign'\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "    \n",
    "    \n",
    "    \n",
    "    # Dense node indexing; node_uuid2index maps uuid -> index and index -> msg.\n",
    "    node_set=set()\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # Skip edges whose endpoints have no recorded msg.\n",
    "        if e[3] not in node_uuid2path or e[0]  not in node_uuid2path:\n",
    "            continue\n",
    "\n",
    "        if e[0] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "\n",
    "        if e[3] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1 \n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "       \n",
    "\n",
    "    # Build the edge stream: src/dst indices, per-edge feature vector, timestamp.\n",
    "    dataset = TemporalData()\n",
    "    src = []\n",
    "    dst = []\n",
    "    msg = []\n",
    "    t = []\n",
    "    for e in (events):\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            # If the image path of the node is not recorded, then skip this edge\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "        #     msg.append(torch.cat([torch.from_numpy(node2higvec_bn[i[0]]), rel2vec[i[2]], torch.from_numpy(node2higvec_bn[i[1]])] ))\n",
    "\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]), \n",
    "                                  edge2vec[e[2]], \n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src)\n",
    "    dataset.dst = torch.tensor(dst)\n",
    "    dataset.t = torch.tensor(t)\n",
    "    dataset.msg = torch.vstack(msg)\n",
    "    dataset.src = dataset.src.to(torch.long)\n",
    "    dataset.dst = dataset.dst.to(torch.long)\n",
    "    dataset.msg = dataset.msg.to(torch.float)\n",
    "    dataset.t = dataset.t.to(torch.long)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")  \n",
    "    "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h501 22"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=4263136\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [18:00<00:00, 1080.32s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build the benign TemporalData dataset for host SysClient0501, day 2019-09-22.\n",
    "# NOTE(review): duplicate of the cells above except for `hostname`/day range.\n",
    "for day in tqdm(range(22,23)):\n",
    "    # Day boundaries (US timezone) converted to epoch timestamps.\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0501'\n",
    "    datalabel='benign'\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "    \n",
    "    \n",
    "    \n",
    "    # Dense node indexing; node_uuid2index maps uuid -> index and index -> msg.\n",
    "    node_set=set()\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # Skip edges whose endpoints have no recorded msg.\n",
    "        if e[3] not in node_uuid2path or e[0]  not in node_uuid2path:\n",
    "            continue\n",
    "\n",
    "        if e[0] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "\n",
    "        if e[3] in node_uuid2index:\n",
    "            pass\n",
    "        else:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1 \n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the edge stream: src/dst indices, per-edge feature vector, timestamp.\n",
    "    dataset = TemporalData()\n",
    "    src = []\n",
    "    dst = []\n",
    "    msg = []\n",
    "    t = []\n",
    "    for e in (events):\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            # If the image path of the node is not recorded, then skip this edge\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "        #     msg.append(torch.cat([torch.from_numpy(node2higvec_bn[i[0]]), rel2vec[i[2]], torch.from_numpy(node2higvec_bn[i[1]])] ))\n",
    "\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]), \n",
    "                                  edge2vec[e[2]], \n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src)\n",
    "    dataset.dst = torch.tensor(dst)\n",
    "    dataset.t = torch.tensor(t)\n",
    "    dataset.msg = torch.vstack(msg)\n",
    "    dataset.src = dataset.src.to(torch.long)\n",
    "    dataset.dst = dataset.dst.to(torch.long)\n",
    "    dataset.msg = dataset.msg.to(torch.float)\n",
    "    dataset.t = dataset.t.to(torch.long)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")  \n",
    "    "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h051 22"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=4074941\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [17:20<00:00, 1040.98s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build a TemporalData graph for host SysClient0051, day 22 (benign/training\n",
    "# slice) and save both the node index map and the dataset to disk.\n",
    "for day in tqdm(range(22,23)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0051'\n",
    "    datalabel='benign'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h209 22"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3853947\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [17:31<00:00, 1051.36s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build a TemporalData graph for host SysClient0209, day 22 (benign/training\n",
    "# slice) and save both the node index map and the dataset to disk.\n",
    "for day in tqdm(range(22,23)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0209'\n",
    "    datalabel='benign'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Generate the validation set"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h209 23"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/1 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=1462775\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 1/1 [06:28<00:00, 388.73s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build a TemporalData graph for host SysClient0209, day 23 (benign slice\n",
    "# used as the validation set) and save the node index map and dataset.\n",
    "for day in tqdm(range(23,24)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0209'\n",
    "    datalabel='benign'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Generate the evaluation set"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h201 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2354159\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [10:05<20:10, 605.36s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3720913\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [25:52<13:26, 806.31s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2195398\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [35:17<00:00, 705.94s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build TemporalData graphs for host SysClient0201, days 23-25 (evaluation\n",
    "# slices) and save the per-day node index maps and datasets.\n",
    "for day in tqdm(range(23,26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0201'\n",
    "    datalabel='evaluation'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h402 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2513800\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [10:47<21:34, 647.40s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3844461\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [27:08<14:03, 843.44s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2317807\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [37:06<00:00, 742.23s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build TemporalData graphs for host SysClient0402, days 23-25 (evaluation\n",
    "# slices) and save the per-day node index maps and datasets.\n",
    "for day in tqdm(range(23,26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0402'\n",
    "    datalabel='evaluation'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h660 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 63,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2317440\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [10:38<21:16, 638.03s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3558940\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [25:48<13:18, 798.34s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2314759\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [35:44<00:00, 714.82s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build TemporalData graphs for host SysClient0660, days 23-25 (evaluation\n",
    "# slices) and save the per-day node index maps and datasets.\n",
    "for day in tqdm(range(23,26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0660'\n",
    "    datalabel='evaluation'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h501 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2394555\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [10:18<20:36, 618.05s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3954364\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [27:02<14:05, 845.53s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2386523\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [37:56<00:00, 758.76s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build TemporalData graphs for host SysClient0501, days 23-25 (evaluation\n",
    "# slices) and save the per-day node index maps and datasets.\n",
    "for day in tqdm(range(23,26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0501'\n",
    "    datalabel='evaluation'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h051 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2381599\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [10:16<20:33, 616.74s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3741872\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [26:14<13:37, 817.43s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2322399\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [36:14<00:00, 724.91s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build TemporalData graphs for host SysClient0051, days 23-25 (evaluation\n",
    "# slices) and save the per-day node index maps and datasets.\n",
    "for day in tqdm(range(23,26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0051'\n",
    "    datalabel='evaluation'\n",
    "    # Interpolated values are hard-coded constants above, so f-string SQL is\n",
    "    # acceptable here; switch to a parameterized query if these ever come from\n",
    "    # untrusted input.\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign a dense integer index to every node with a recorded image path.\n",
    "    # The same dict maps uuid -> index and index -> path; the key spaces do not\n",
    "    # collide because uuids are strings and indices are ints.\n",
    "    node_uuid2index={}\n",
    "    temp_index=0\n",
    "    for e in events:\n",
    "        # e[0] = source uuid, e[3] = destination uuid; skip edges whose\n",
    "        # endpoints have no recorded path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[0]]\n",
    "            temp_index+=1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]]=temp_index\n",
    "            node_uuid2index[temp_index]=node_uuid2path[e[3]]\n",
    "            temp_index+=1\n",
    "\n",
    "    torch.save(node_uuid2index,f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge stream: src/dst indices, per-edge message vector\n",
    "    # (src-path embedding | edge-type embedding | dst-path embedding), and the\n",
    "    # event timestamp (e[6]).\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node was not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1],node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4],node_uuid2path[e[3]])\n",
    "                                 ]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src).to(torch.long)\n",
    "    dataset.dst = torch.tensor(dst).to(torch.long)\n",
    "    dataset.t = torch.tensor(t).to(torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## h207 23-25"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      "  0%|          | 0/3 [00:00<?, ?it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2303987\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 33%|███▎      | 1/3 [09:48<19:36, 588.04s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=3642215\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\r",
      " 67%|██████▋   | 2/3 [25:12<13:06, 786.06s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(events)=2171961\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████| 3/3 [34:30<00:00, 690.20s/it]\n"
     ]
    }
   ],
   "source": [
    "# Build one TemporalData graph per day (9/23-9/25) for host SysClient0207\n",
    "# (evaluation split), and save the node-index dictionary alongside it.\n",
    "for day in tqdm(range(23, 26)):\n",
    "    start_timestamp=datetime_to_timestamp_US('2019-09-'+str(day)+'T00:00:00')\n",
    "    end_timestamp=datetime_to_timestamp_US('2019-09-'+str(day+1)+'T00:00:00')\n",
    "    hostname='SysClient0207'\n",
    "    datalabel='evaluation'\n",
    "    sql=f\"\"\"\n",
    "    select * from event_table\n",
    "    where\n",
    "          timestamp>{start_timestamp} and timestamp<{end_timestamp}\n",
    "          and hostname='{hostname}' and data_label='{datalabel}' ORDER BY timestamp;\n",
    "    \"\"\"\n",
    "    cur.execute(sql)\n",
    "    events = cur.fetchall()\n",
    "    print(f\"{len(events)=}\")\n",
    "\n",
    "    # Assign each node uuid a dense integer index. The same dict also stores\n",
    "    # the reverse mapping: int index -> node path string.\n",
    "    node_uuid2index = {}\n",
    "    temp_index = 0\n",
    "    for e in events:\n",
    "        # Skip edges where either endpoint has no recorded image path.\n",
    "        if e[3] not in node_uuid2path or e[0] not in node_uuid2path:\n",
    "            continue\n",
    "        if e[0] not in node_uuid2index:\n",
    "            node_uuid2index[e[0]] = temp_index\n",
    "            node_uuid2index[temp_index] = node_uuid2path[e[0]]\n",
    "            temp_index += 1\n",
    "        if e[3] not in node_uuid2index:\n",
    "            node_uuid2index[e[3]] = temp_index\n",
    "            node_uuid2index[temp_index] = node_uuid2path[e[3]]\n",
    "            temp_index += 1\n",
    "\n",
    "    torch.save(node_uuid2index, f'node_uuid2index_9_{day}_host={hostname}_datalabel={datalabel}')\n",
    "\n",
    "    # Build the temporal edge list: src/dst node indices, a per-edge message\n",
    "    # vector (src feature | edge-type vector | dst feature), and timestamp.\n",
    "    dataset = TemporalData()\n",
    "    src, dst, msg, t = [], [], [], []\n",
    "    for e in events:\n",
    "        # If the image path of a node is not recorded, skip this edge.\n",
    "        if e[3] in node_uuid2index and e[0] in node_uuid2index:\n",
    "            src.append(node_uuid2index[e[0]])\n",
    "            dst.append(node_uuid2index[e[3]])\n",
    "            msg.append(torch.cat([str2tensor(e[1], node_uuid2path[e[0]]),\n",
    "                                  edge2vec[e[2]],\n",
    "                                  str2tensor(e[4], node_uuid2path[e[3]])]))\n",
    "            t.append(int(e[6]))\n",
    "\n",
    "    dataset.src = torch.tensor(src, dtype=torch.long)\n",
    "    dataset.dst = torch.tensor(dst, dtype=torch.long)\n",
    "    dataset.t = torch.tensor(t, dtype=torch.long)\n",
    "    dataset.msg = torch.vstack(msg).to(torch.float)\n",
    "    torch.save(dataset, f\"./data/evaluation/9_{day}_host={hostname}_datalabel={datalabel}.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# A CSV file containing the ground-truth nodes & edges"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the CSV of ground-truth (attack) nodes & edges\n",
    "label_df=pd.read_csv(\"./labels.csv\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>hostname</th>\n",
       "      <th>id</th>\n",
       "      <th>objectID</th>\n",
       "      <th>actorID</th>\n",
       "      <th>timestamp</th>\n",
       "      <th>object</th>\n",
       "      <th>action</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>SysClient0201.systemia.com</td>\n",
       "      <td>43fb9623-3cd1-45ec-ab22-dbe46e75240e</td>\n",
       "      <td>96913629-c1c9-4503-9586-4a91de0e7311</td>\n",
       "      <td>af6b49d5-f648-41a4-946d-d92b174bae47</td>\n",
       "      <td>2019-09-23T11:23:55.857-04:00</td>\n",
       "      <td>PROCESS</td>\n",
       "      <td>CREATE</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>SysClient0201.systemia.com</td>\n",
       "      <td>78fccbc8-58d1-4598-ae21-99f57ee57ed8</td>\n",
       "      <td>b53c1986-842c-493a-910c-78b55da2575f</td>\n",
       "      <td>96913629-c1c9-4503-9586-4a91de0e7311</td>\n",
       "      <td>2019-09-23T11:25:26.418-04:00</td>\n",
       "      <td>SHELL</td>\n",
       "      <td>COMMAND</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>SysClient0201.systemia.com</td>\n",
       "      <td>fb89e8be-47a1-418c-9bb8-a4c702694c74</td>\n",
       "      <td>b53c1986-842c-493a-910c-78b55da2575f</td>\n",
       "      <td>96913629-c1c9-4503-9586-4a91de0e7311</td>\n",
       "      <td>2019-09-23T11:25:26.416-04:00</td>\n",
       "      <td>SHELL</td>\n",
       "      <td>COMMAND</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>SysClient0201.systemia.com</td>\n",
       "      <td>05e5bde3-8db5-410a-ad75-de676bd14ebb</td>\n",
       "      <td>b53c1986-842c-493a-910c-78b55da2575f</td>\n",
       "      <td>96913629-c1c9-4503-9586-4a91de0e7311</td>\n",
       "      <td>2019-09-23T11:25:26.436-04:00</td>\n",
       "      <td>SHELL</td>\n",
       "      <td>COMMAND</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>SysClient0201.systemia.com</td>\n",
       "      <td>1bdb7482-a548-40f2-b648-ea258e6c2448</td>\n",
       "      <td>b53c1986-842c-493a-910c-78b55da2575f</td>\n",
       "      <td>96913629-c1c9-4503-9586-4a91de0e7311</td>\n",
       "      <td>2019-09-23T11:25:26.434-04:00</td>\n",
       "      <td>SHELL</td>\n",
       "      <td>COMMAND</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>292362</th>\n",
       "      <td>SysClient0051.systemia.com</td>\n",
       "      <td>a41bc6ec-4215-4813-8b8a-b2d395005280</td>\n",
       "      <td>3b2eec89-962b-43d2-87a1-ff81bda6d6e4</td>\n",
       "      <td>7bfb883a-7214-486f-922a-0ed019c85579</td>\n",
       "      <td>2019-09-25T11:11:07.717-04:00</td>\n",
       "      <td>FLOW</td>\n",
       "      <td>START</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>292363</th>\n",
       "      <td>SysClient0051.systemia.com</td>\n",
       "      <td>9666376b-09ea-4f28-ae5e-4be27a21f6ac</td>\n",
       "      <td>a5e9769b-ad18-4d56-86b7-81deb81defb4</td>\n",
       "      <td>98031f5d-d754-4547-8bbb-b8a04f537037</td>\n",
       "      <td>2019-09-25T11:12:13.911-04:00</td>\n",
       "      <td>FLOW</td>\n",
       "      <td>START</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>292364</th>\n",
       "      <td>SysClient0051.systemia.com</td>\n",
       "      <td>2e0d72da-7ca8-4a3a-b30d-a21f8bebfbf6</td>\n",
       "      <td>a5e9769b-ad18-4d56-86b7-81deb81defb4</td>\n",
       "      <td>98031f5d-d754-4547-8bbb-b8a04f537037</td>\n",
       "      <td>2019-09-25T11:12:14.925-04:00</td>\n",
       "      <td>FLOW</td>\n",
       "      <td>START</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>292365</th>\n",
       "      <td>SysClient0051.systemia.com</td>\n",
       "      <td>2d41a09a-30ae-4e4d-a173-16fe760b6d16</td>\n",
       "      <td>a5e9769b-ad18-4d56-86b7-81deb81defb4</td>\n",
       "      <td>98031f5d-d754-4547-8bbb-b8a04f537037</td>\n",
       "      <td>2019-09-25T11:12:15.941-04:00</td>\n",
       "      <td>FLOW</td>\n",
       "      <td>START</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>292366</th>\n",
       "      <td>SysClient0051.systemia.com</td>\n",
       "      <td>ab9659e2-fbbd-483a-a14a-f415300c6b17</td>\n",
       "      <td>a5e9769b-ad18-4d56-86b7-81deb81defb4</td>\n",
       "      <td>98031f5d-d754-4547-8bbb-b8a04f537037</td>\n",
       "      <td>2019-09-25T11:12:16.956-04:00</td>\n",
       "      <td>FLOW</td>\n",
       "      <td>START</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>292367 rows × 7 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "                          hostname                                    id  \\\n",
       "0       SysClient0201.systemia.com  43fb9623-3cd1-45ec-ab22-dbe46e75240e   \n",
       "1       SysClient0201.systemia.com  78fccbc8-58d1-4598-ae21-99f57ee57ed8   \n",
       "2       SysClient0201.systemia.com  fb89e8be-47a1-418c-9bb8-a4c702694c74   \n",
       "3       SysClient0201.systemia.com  05e5bde3-8db5-410a-ad75-de676bd14ebb   \n",
       "4       SysClient0201.systemia.com  1bdb7482-a548-40f2-b648-ea258e6c2448   \n",
       "...                            ...                                   ...   \n",
       "292362  SysClient0051.systemia.com  a41bc6ec-4215-4813-8b8a-b2d395005280   \n",
       "292363  SysClient0051.systemia.com  9666376b-09ea-4f28-ae5e-4be27a21f6ac   \n",
       "292364  SysClient0051.systemia.com  2e0d72da-7ca8-4a3a-b30d-a21f8bebfbf6   \n",
       "292365  SysClient0051.systemia.com  2d41a09a-30ae-4e4d-a173-16fe760b6d16   \n",
       "292366  SysClient0051.systemia.com  ab9659e2-fbbd-483a-a14a-f415300c6b17   \n",
       "\n",
       "                                    objectID  \\\n",
       "0       96913629-c1c9-4503-9586-4a91de0e7311   \n",
       "1       b53c1986-842c-493a-910c-78b55da2575f   \n",
       "2       b53c1986-842c-493a-910c-78b55da2575f   \n",
       "3       b53c1986-842c-493a-910c-78b55da2575f   \n",
       "4       b53c1986-842c-493a-910c-78b55da2575f   \n",
       "...                                      ...   \n",
       "292362  3b2eec89-962b-43d2-87a1-ff81bda6d6e4   \n",
       "292363  a5e9769b-ad18-4d56-86b7-81deb81defb4   \n",
       "292364  a5e9769b-ad18-4d56-86b7-81deb81defb4   \n",
       "292365  a5e9769b-ad18-4d56-86b7-81deb81defb4   \n",
       "292366  a5e9769b-ad18-4d56-86b7-81deb81defb4   \n",
       "\n",
       "                                     actorID                      timestamp  \\\n",
       "0       af6b49d5-f648-41a4-946d-d92b174bae47  2019-09-23T11:23:55.857-04:00   \n",
       "1       96913629-c1c9-4503-9586-4a91de0e7311  2019-09-23T11:25:26.418-04:00   \n",
       "2       96913629-c1c9-4503-9586-4a91de0e7311  2019-09-23T11:25:26.416-04:00   \n",
       "3       96913629-c1c9-4503-9586-4a91de0e7311  2019-09-23T11:25:26.436-04:00   \n",
       "4       96913629-c1c9-4503-9586-4a91de0e7311  2019-09-23T11:25:26.434-04:00   \n",
       "...                                      ...                            ...   \n",
       "292362  7bfb883a-7214-486f-922a-0ed019c85579  2019-09-25T11:11:07.717-04:00   \n",
       "292363  98031f5d-d754-4547-8bbb-b8a04f537037  2019-09-25T11:12:13.911-04:00   \n",
       "292364  98031f5d-d754-4547-8bbb-b8a04f537037  2019-09-25T11:12:14.925-04:00   \n",
       "292365  98031f5d-d754-4547-8bbb-b8a04f537037  2019-09-25T11:12:15.941-04:00   \n",
       "292366  98031f5d-d754-4547-8bbb-b8a04f537037  2019-09-25T11:12:16.956-04:00   \n",
       "\n",
       "         object   action  \n",
       "0       PROCESS   CREATE  \n",
       "1         SHELL  COMMAND  \n",
       "2         SHELL  COMMAND  \n",
       "3         SHELL  COMMAND  \n",
       "4         SHELL  COMMAND  \n",
       "...         ...      ...  \n",
       "292362     FLOW    START  \n",
       "292363     FLOW    START  \n",
       "292364     FLOW    START  \n",
       "292365     FLOW    START  \n",
       "292366     FLOW    START  \n",
       "\n",
       "[292367 rows x 7 columns]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Inspect the ground-truth labels table\n",
    "label_df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Collect ground-truth attack nodes (uuid -> path) and attack edges from\n",
    "# the labels table.\n",
    "nodes_attack = {}\n",
    "edges_attack_list = []\n",
    "\n",
    "for idx, row in label_df.iterrows():\n",
    "    known_endpoint = False\n",
    "    if row['objectID'] in node_uuid2path:\n",
    "        nodes_attack[row['objectID']] = node_uuid2path[row['objectID']]\n",
    "        known_endpoint = True\n",
    "    if row['actorID'] in node_uuid2path:\n",
    "        nodes_attack[row['actorID']] = node_uuid2path[row['actorID']]\n",
    "        known_endpoint = True\n",
    "    # Keep the edge when at least one endpoint has a known path and the\n",
    "    # action type has an embedding vector.\n",
    "    if known_endpoint and row['action'] in edge2vec:\n",
    "        edges_attack_list.append({\n",
    "            'src_uuid': row['actorID'],\n",
    "            'dst_uuid': row['objectID'],\n",
    "            'edge_type': row['action'],\n",
    "            'timestamp': datetime_to_timestamp_US(row['timestamp']),\n",
    "        })\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "33504"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Number of ground-truth attack edges kept\n",
    "len(edges_attack_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "16047"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Number of ground-truth attack nodes found in node_uuid2path\n",
    "len(nodes_attack)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Statistics (number of nodes and edges)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 59,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the 9/22 benign-day temporal graphs for each monitored host\n",
    "graph_9_22_h201=torch.load(\"./data/evaluation/9_22_host=SysClient0201_datalabel=benign.TemporalData\")\n",
    "graph_9_22_h402=torch.load(\"./data/evaluation/9_22_host=SysClient0402_datalabel=benign.TemporalData\")\n",
    "graph_9_22_h660=torch.load(\"./data/evaluation/9_22_host=SysClient0660_datalabel=benign.TemporalData\")\n",
    "graph_9_22_h501=torch.load(\"./data/evaluation/9_22_host=SysClient0501_datalabel=benign.TemporalData\")\n",
    "graph_9_22_h051=torch.load(\"./data/evaluation/9_22_host=SysClient0051_datalabel=benign.TemporalData\")\n",
    "graph_9_22_h209=torch.load(\"./data/evaluation/9_22_host=SysClient0209_datalabel=benign.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 201: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h201=torch.load(\"./data/evaluation/9_23_host=SysClient0201_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h201=torch.load(\"./data/evaluation/9_24_host=SysClient0201_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h201=torch.load(\"./data/evaluation/9_25_host=SysClient0201_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 402: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h402=torch.load(\"./data/evaluation/9_23_host=SysClient0402_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h402=torch.load(\"./data/evaluation/9_24_host=SysClient0402_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h402=torch.load(\"./data/evaluation/9_25_host=SysClient0402_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 660: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h660=torch.load(\"./data/evaluation/9_23_host=SysClient0660_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h660=torch.load(\"./data/evaluation/9_24_host=SysClient0660_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h660=torch.load(\"./data/evaluation/9_25_host=SysClient0660_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 501: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h501=torch.load(\"./data/evaluation/9_23_host=SysClient0501_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h501=torch.load(\"./data/evaluation/9_24_host=SysClient0501_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h501=torch.load(\"./data/evaluation/9_25_host=SysClient0501_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 57,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 051: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h051=torch.load(\"./data/evaluation/9_23_host=SysClient0051_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h051=torch.load(\"./data/evaluation/9_24_host=SysClient0051_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h051=torch.load(\"./data/evaluation/9_25_host=SysClient0051_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 58,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Host 207: evaluation-day graphs (9/23-9/25)\n",
    "graph_9_23_h207=torch.load(\"./data/evaluation/9_23_host=SysClient0207_datalabel=evaluation.TemporalData\")\n",
    "graph_9_24_h207=torch.load(\"./data/evaluation/9_24_host=SysClient0207_datalabel=evaluation.TemporalData\")\n",
    "graph_9_25_h207=torch.load(\"./data/evaluation/9_25_host=SysClient0207_datalabel=evaluation.TemporalData\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {},
   "outputs": [],
   "source": [
    "# All loaded graphs: benign 9/22 plus evaluation 9/23-9/25, per host\n",
    "graphs=[\n",
    "    graph_9_22_h201,\n",
    "    graph_9_22_h402,\n",
    "    graph_9_22_h660,\n",
    "    graph_9_22_h501,\n",
    "    graph_9_22_h051,\n",
    "    graph_9_22_h209,\n",
    "    \n",
    "    graph_9_23_h201,\n",
    "    graph_9_24_h201,\n",
    "    graph_9_25_h201,\n",
    "    \n",
    "    graph_9_23_h402,\n",
    "    graph_9_24_h402,\n",
    "    graph_9_25_h402,\n",
    "    \n",
    "    graph_9_23_h660,\n",
    "    graph_9_24_h660,\n",
    "    graph_9_25_h660,\n",
    "    \n",
    "    graph_9_23_h501,\n",
    "    graph_9_24_h501,\n",
    "    graph_9_25_h501,\n",
    "    \n",
    "    graph_9_23_h051,\n",
    "    graph_9_24_h051,\n",
    "    graph_9_25_h051,\n",
    "    \n",
    "    graph_9_23_h207,\n",
    "    graph_9_24_h207,\n",
    "    graph_9_25_h207,\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Total number of edges (temporal events) across all loaded graphs.\n",
    "edges_count = sum(len(g.t) for g in graphs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "74989583"
      ]
     },
     "execution_count": 72,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Total edge count across all graphs\n",
    "edges_count"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 77,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the uuid<->index / index->path dictionaries saved during graph construction\n",
    "node_uuid2index_9_22_h201=torch.load(\"node_uuid2index_9_22_host=SysClient0201_datalabel=benign\")\n",
    "node_uuid2index_9_22_h402=torch.load(\"node_uuid2index_9_22_host=SysClient0402_datalabel=benign\")\n",
    "node_uuid2index_9_22_h660=torch.load(\"node_uuid2index_9_22_host=SysClient0660_datalabel=benign\")\n",
    "node_uuid2index_9_22_h501=torch.load(\"node_uuid2index_9_22_host=SysClient0501_datalabel=benign\")\n",
    "node_uuid2index_9_22_h051=torch.load(\"node_uuid2index_9_22_host=SysClient0051_datalabel=benign\")\n",
    "node_uuid2index_9_22_h209=torch.load(\"node_uuid2index_9_22_host=SysClient0209_datalabel=benign\")\n",
    "\n",
    "\n",
    "node_uuid2index_9_23_h201=torch.load(\"node_uuid2index_9_23_host=SysClient0201_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h201=torch.load(\"node_uuid2index_9_24_host=SysClient0201_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h201=torch.load(\"node_uuid2index_9_25_host=SysClient0201_datalabel=evaluation\")\n",
    "\n",
    "node_uuid2index_9_23_h402=torch.load(\"node_uuid2index_9_23_host=SysClient0402_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h402=torch.load(\"node_uuid2index_9_24_host=SysClient0402_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h402=torch.load(\"node_uuid2index_9_25_host=SysClient0402_datalabel=evaluation\")\n",
    "\n",
    "node_uuid2index_9_23_h660=torch.load(\"node_uuid2index_9_23_host=SysClient0660_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h660=torch.load(\"node_uuid2index_9_24_host=SysClient0660_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h660=torch.load(\"node_uuid2index_9_25_host=SysClient0660_datalabel=evaluation\")\n",
    "\n",
    "node_uuid2index_9_23_h501=torch.load(\"node_uuid2index_9_23_host=SysClient0501_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h501=torch.load(\"node_uuid2index_9_24_host=SysClient0501_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h501=torch.load(\"node_uuid2index_9_25_host=SysClient0501_datalabel=evaluation\")\n",
    "\n",
    "node_uuid2index_9_23_h051=torch.load(\"node_uuid2index_9_23_host=SysClient0051_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h051=torch.load(\"node_uuid2index_9_24_host=SysClient0051_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h051=torch.load(\"node_uuid2index_9_25_host=SysClient0051_datalabel=evaluation\")\n",
    "\n",
    "node_uuid2index_9_23_h207=torch.load(\"node_uuid2index_9_23_host=SysClient0207_datalabel=evaluation\")\n",
    "node_uuid2index_9_24_h207=torch.load(\"node_uuid2index_9_24_host=SysClient0207_datalabel=evaluation\")\n",
    "node_uuid2index_9_25_h207=torch.load(\"node_uuid2index_9_25_host=SysClient0207_datalabel=evaluation\")\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 78,
   "metadata": {},
   "outputs": [],
   "source": [
    "# All node dictionaries, one per (day, host) graph above\n",
    "node_dics=[\n",
    "    node_uuid2index_9_22_h201,\n",
    "    node_uuid2index_9_22_h402,\n",
    "    node_uuid2index_9_22_h660,\n",
    "    node_uuid2index_9_22_h501,\n",
    "    node_uuid2index_9_22_h051,\n",
    "    node_uuid2index_9_22_h209,\n",
    "    node_uuid2index_9_23_h201,\n",
    "    node_uuid2index_9_24_h201,\n",
    "    node_uuid2index_9_25_h201,\n",
    "    node_uuid2index_9_23_h402,\n",
    "    node_uuid2index_9_24_h402,\n",
    "    node_uuid2index_9_25_h402,\n",
    "    node_uuid2index_9_23_h660,\n",
    "    node_uuid2index_9_24_h660,\n",
    "    node_uuid2index_9_25_h660,\n",
    "    node_uuid2index_9_23_h501,\n",
    "    node_uuid2index_9_24_h501,\n",
    "    node_uuid2index_9_25_h501,\n",
    "    node_uuid2index_9_23_h051,\n",
    "    node_uuid2index_9_24_h051,\n",
    "    node_uuid2index_9_25_h051,\n",
    "    node_uuid2index_9_23_h207,\n",
    "    node_uuid2index_9_24_h207,\n",
    "    node_uuid2index_9_25_h207,\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 86,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Count distinct node uuids over every host/day dictionary. Each dict maps\n",
    "# both uuid (str) -> index and index (int) -> path, so keep only str keys.\n",
    "nodes = {n for dic in node_dics for n in dic if isinstance(n, str)}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "9485265"
      ]
     },
     "execution_count": 87,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Total number of unique node uuids across all hosts/days\n",
    "len(nodes)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python [conda env:pyg20]",
   "language": "python",
   "name": "conda-env-pyg20-py"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.13"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {
    "height": "47.7109px",
    "left": "21px",
    "top": "204.141px",
    "width": "199.344px"
   },
   "toc_section_display": true,
   "toc_window_display": true
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
