{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# encoding=utf-8\n",
    "import os.path as osp\n",
    "import os\n",
    "import copy\n",
    "import matplotlib.pyplot as plt\n",
    "import torch\n",
    "from torch.nn import Linear\n",
    "from sklearn.metrics import average_precision_score, roc_auc_score\n",
    "from torch_geometric.data import TemporalData\n",
    "from torch_geometric.datasets import JODIEDataset\n",
    "from torch_geometric.datasets import ICEWS18\n",
    "from torch_geometric.nn import TGNMemory, TransformerConv\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torch_geometric.nn.models.tgn import (LastNeighborLoader, IdentityMessage, MeanAggregator,\n",
    "                                           LastAggregator)\n",
    "from torch_geometric import *\n",
    "from torch_geometric.utils import negative_sampling\n",
    "from tqdm import tqdm\n",
    "import networkx as nx\n",
    "import numpy as np\n",
    "import math\n",
    "import copy\n",
    "import re\n",
    "import time\n",
    "import json\n",
    "import pandas as pd\n",
    "from random import choice\n",
    "import gc\n",
    "\n",
    "# We conducted this experiment on CPU, including training, loading and testing models.\n",
    "# For reproducibility, we recommend users to load and test our models on CPU.\n",
    "device = 'cpu'\n",
    "# msg structure:    [src_node_feature,edge_attr,dst_node_feature]\n",
    "\n",
    "# compute the best partition \n",
    "import datetime\n",
    "# import community as community_louvain\n",
    "\n",
    "import xxhash\n",
    "# Find the edge index which the edge vector is corresponding to\n",
    "def tensor_find(t,x):\n",
    "    \"\"\"Return the 1-based index of the first occurrence of value x in tensor t.\n",
    "\n",
    "    Raises IndexError if x is not present (idx would be empty).\n",
    "    \"\"\"\n",
    "    t_np=t.cpu().numpy()\n",
    "    idx=np.argwhere(t_np==x)\n",
    "    # +1 makes the index 1-based; callers that need a 0-based label subtract 1.\n",
    "    return idx[0][0]+1\n",
    "\n",
    "\n",
    "def std(t):\n",
    "    # Standard deviation of a Python sequence via numpy.\n",
    "    t = np.array(t)\n",
    "    return np.std(t)\n",
    "\n",
    "\n",
    "def var(t):\n",
    "    # Variance of a Python sequence via numpy.\n",
    "    t = np.array(t)\n",
    "    return np.var(t)\n",
    "\n",
    "\n",
    "def mean(t):\n",
    "    # Arithmetic mean of a Python sequence via numpy.\n",
    "    t = np.array(t)\n",
    "    return np.mean(t)\n",
    "\n",
    "def hashgen(l):\n",
    "    \"\"\"Generate a single hash value from a list. @l is a list of\n",
    "    string values, which can be properties of a node/edge. This\n",
    "    function returns a single hashed integer value.\"\"\"\n",
    "    hasher = xxhash.xxh64()\n",
    "    for e in l:\n",
    "        # Items are fed in order, so the resulting hash is order-sensitive.\n",
    "        hasher.update(e)\n",
    "    return hasher.intdigest()\n",
    "\n",
    "\n",
    "def cal_pos_edges_loss(link_pred_ratio):\n",
    "    # Per-edge loss against an all-ones target; relies on the notebook-global `criterion`.\n",
    "    # NOTE(review): `criterion` is defined later as CrossEntropyLoss; confirm this\n",
    "    # binary variant is still used -- the test loop calls the multiclass variant.\n",
    "    loss=[]\n",
    "    for i in link_pred_ratio:\n",
    "        loss.append(criterion(i,torch.ones(1)))\n",
    "    return torch.tensor(loss)\n",
    "\n",
    "def cal_pos_edges_loss_multiclass(link_pred_ratio,labels):\n",
    "    # One cross-entropy loss scalar per predicted edge, given integer class labels.\n",
    "    loss=[] \n",
    "    for i in range(len(link_pred_ratio)):\n",
    "        loss.append(criterion(link_pred_ratio[i].reshape(1,-1),labels[i].reshape(-1)))\n",
    "    return torch.tensor(loss)\n",
    "\n",
    "def cal_pos_edges_loss_autoencoder(decoded,msg):\n",
    "    # One reconstruction-loss scalar per edge between decoded output and original msg.\n",
    "    loss=[] \n",
    "    for i in range(len(decoded)):\n",
    "        loss.append(criterion(decoded[i].reshape(1,-1),msg[i].reshape(-1)))\n",
    "    return torch.tensor(loss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/javascript": [
       "IPython.notebook.set_autosave_interval(120000)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Autosaving every 120 seconds\n"
     ]
    }
   ],
   "source": [
    "%autosave 120  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "from datetime import datetime, timezone\n",
    "import time\n",
    "import pytz\n",
    "from time import mktime\n",
    "from datetime import datetime\n",
    "import time\n",
    "def ns_time_to_datetime(ns):\n",
    "    \"\"\"\n",
    "    :param ns: int nano timestamp\n",
    "    :return: datetime   format: 2013-10-10 23:40:00.000000000\n",
    "\n",
    "    Uses the local system timezone (fromtimestamp with no tz argument).\n",
    "    \"\"\"\n",
    "    dt = datetime.fromtimestamp(int(ns) // 1000000000)\n",
    "    s = dt.strftime('%Y-%m-%d %H:%M:%S')\n",
    "    # Re-append the sub-second remainder, zero-padded to 9 digits (nanoseconds).\n",
    "    s += '.' + str(int(int(ns) % 1000000000)).zfill(9)\n",
    "    return s\n",
    "\n",
    "def ns_time_to_datetime_US(ns):\n",
    "    \"\"\"\n",
    "    :param ns: int nano timestamp\n",
    "    :return: datetime   format: 2013-10-10 23:40:00.000000000\n",
    "\n",
    "    The wall-clock part is rendered in US/Eastern.\n",
    "    \"\"\"\n",
    "    tz = pytz.timezone('US/Eastern')\n",
    "    # Use the stdlib datetime class (imported above) instead of reaching\n",
    "    # through pytz's internal `pytz.datetime` re-export, which is not part\n",
    "    # of pytz's public API. Behavior is identical.\n",
    "    dt = datetime.fromtimestamp(int(ns) // 1000000000, tz)\n",
    "    s = dt.strftime('%Y-%m-%d %H:%M:%S')\n",
    "    s += '.' + str(int(int(ns) % 1000000000)).zfill(9)\n",
    "    return s\n",
    "\n",
    "def time_to_datetime_US(s):\n",
    "    \"\"\"\n",
    "    :param s: int/str timestamp in whole seconds\n",
    "    :return: datetime   format: 2013-10-10 23:40:00 (US/Eastern wall time)\n",
    "    \"\"\"\n",
    "    tz = pytz.timezone('US/Eastern')\n",
    "    # Stdlib datetime instead of the non-public `pytz.datetime` re-export.\n",
    "    dt = datetime.fromtimestamp(int(s), tz)\n",
    "    s = dt.strftime('%Y-%m-%d %H:%M:%S')\n",
    "\n",
    "    return s\n",
    "\n",
    "def datetime_to_ns_time(date):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: nano timestamp\n",
    "\n",
    "    Interprets `date` in the local system timezone (time.mktime).\n",
    "    \"\"\"\n",
    "    timeArray = time.strptime(date, \"%Y-%m-%d %H:%M:%S\")\n",
    "    timeStamp = int(time.mktime(timeArray))\n",
    "    timeStamp = timeStamp * 1000000000\n",
    "    return timeStamp\n",
    "\n",
    "def datetime_to_ns_time_US(date):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: nano timestamp, with `date` interpreted as US/Eastern wall time\n",
    "    \"\"\"\n",
    "    tz = pytz.timezone('US/Eastern')\n",
    "    timeArray = time.strptime(date, \"%Y-%m-%d %H:%M:%S\")\n",
    "    dt = datetime.fromtimestamp(mktime(timeArray))\n",
    "    # localize() attaches the US/Eastern (DST-aware) zone to the naive datetime.\n",
    "    timestamp = tz.localize(dt)\n",
    "    timestamp = timestamp.timestamp()\n",
    "    timeStamp = timestamp * 1000000000\n",
    "    return int(timeStamp)\n",
    "\n",
    "def datetime_to_timestamp_US(date):\n",
    "    \"\"\"\n",
    "    :param date: str   format: %Y-%m-%d %H:%M:%S   e.g. 2013-10-10 23:40:00\n",
    "    :return: second-resolution timestamp, `date` interpreted as US/Eastern\n",
    "    \"\"\"\n",
    "    tz = pytz.timezone('US/Eastern')\n",
    "    timeArray = time.strptime(date, \"%Y-%m-%d %H:%M:%S\")\n",
    "    dt = datetime.fromtimestamp(mktime(timeArray))\n",
    "    timestamp = tz.localize(dt)\n",
    "    timestamp = timestamp.timestamp()\n",
    "    timeStamp = timestamp\n",
    "    return int(timeStamp)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "import psycopg2\n",
    "\n",
    "from psycopg2 import extras as ex\n",
    "# Connect to the local PostgreSQL instance holding the parsed dataset.\n",
    "# SECURITY NOTE(review): credentials are hardcoded; prefer environment\n",
    "# variables (os.environ) or getpass before sharing this notebook.\n",
    "connect = psycopg2.connect(database = 'tc_e5_clearscope_dataset_db',\n",
    "                           host = '/var/run/postgresql/',\n",
    "                           user = 'postgres',\n",
    "                           password = 'postgres',\n",
    "                           port = '5432'\n",
    "                          )\n",
    "\n",
    "cur = connect.cursor()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the pre-built daily training graphs onto the target device.\n",
    "# map_location keeps the load CPU-safe even if the tensors were saved from\n",
    "# a GPU process, matching torch.load(..., map_location=device) used for the model.\n",
    "graph_5_8=torch.load(\"./train_graphs/graph_5_8.TemporalData.simple\", map_location=device).to(device=device)\n",
    "graph_5_9=torch.load(\"./train_graphs/graph_5_9.TemporalData.simple\", map_location=device).to(device=device)\n",
    "graph_5_11=torch.load(\"./train_graphs/graph_5_11.TemporalData.simple\", map_location=device).to(device=device)\n",
    "\n",
    "\n",
    "train_data=graph_5_8"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Constructing the map for nodeid to msg\n",
    "sql=\"select * from node2id ORDER BY index_id;\"\n",
    "cur.execute(sql)\n",
    "rows = cur.fetchall()\n",
    "\n",
    "nodeid2msg={}  # nodeid => msg and node hash => nodeid\n",
    "for i in rows:\n",
    "    # i[0] is index_id, i[-1] the node hash/id; i[1], i[2] presumably the node\n",
    "    # type and its value -- verify against the node2id table schema.\n",
    "    nodeid2msg[i[0]]=i[-1]\n",
    "    nodeid2msg[i[-1]]={i[1]:i[2]}  "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Bidirectional map between event-type names and 1-based label ids; the ids\n",
    "# match the one-hot position found by tensor_find over msg[16:-16] below.\n",
    "rel2id={1: 'EVENT_ACCEPT',\n",
    " 'EVENT_ACCEPT': 1,\n",
    " 2: 'EVENT_CLONE',\n",
    " 'EVENT_CLONE': 2,\n",
    " 3: 'EVENT_CLOSE',\n",
    " 'EVENT_CLOSE': 3,\n",
    " 4: 'EVENT_CREATE_OBJECT',\n",
    " 'EVENT_CREATE_OBJECT': 4,\n",
    " 5: 'EVENT_EXECUTE',\n",
    " 'EVENT_EXECUTE': 5,\n",
    " 6: 'EVENT_OPEN',\n",
    " 'EVENT_OPEN': 6,\n",
    " 7: 'EVENT_READ',\n",
    " 'EVENT_READ': 7,\n",
    " 8: 'EVENT_RECVFROM',\n",
    " 'EVENT_RECVFROM': 8,\n",
    " 9: 'EVENT_SENDTO',\n",
    " 'EVENT_SENDTO': 9,\n",
    " 10: 'EVENT_WRITE',\n",
    " 'EVENT_WRITE': 10}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# train_data, val_data, test_data = data.train_val_test_split(val_ratio=0.15, test_ratio=0.15)\n",
    "# max_node_num = max(torch.cat([data.dst,data.src]))+1\n",
    "# max_node_num = data.num_nodes+1\n",
    "# Hardcoded total node count for this dataset (see commented derivations above).\n",
    "max_node_num = 139961  # +1\n",
    "# min_dst_idx, max_dst_idx = int(data.dst.min()), int(data.dst.max())\n",
    "min_dst_idx, max_dst_idx = 0, max_node_num\n",
    "# Keep the 20 most recent neighbors per node for temporal message passing.\n",
    "neighbor_loader = LastNeighborLoader(max_node_num, size=20, device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "class GraphAttentionEmbedding(torch.nn.Module):\n",
    "    \"\"\"Two-layer TransformerConv encoder over the temporal neighborhood.\n",
    "\n",
    "    Edge features are [time-encoding || raw message]. The first layer uses\n",
    "    8 concatenated heads; the second collapses back to out_channels.\n",
    "    \"\"\"\n",
    "    def __init__(self, in_channels, out_channels, msg_dim, time_enc):\n",
    "        super(GraphAttentionEmbedding, self).__init__()\n",
    "        self.time_enc = time_enc\n",
    "        edge_dim = msg_dim + time_enc.out_channels\n",
    "        self.conv = TransformerConv(in_channels, out_channels, heads=8,\n",
    "                                    dropout=0.0, edge_dim=edge_dim)\n",
    "        # NOTE: conv outputs out_channels*8 (heads concatenated); in_channels*8\n",
    "        # works here only because in_channels == out_channels in this notebook.\n",
    "        self.conv2 = TransformerConv(in_channels*8, out_channels,heads=1, concat=False,\n",
    "                             dropout=0.0, edge_dim=edge_dim)\n",
    "\n",
    "    def forward(self, x, last_update, edge_index, t, msg):\n",
    "        # BUGFIX: Tensor.to() is not in-place; the original discarded its result.\n",
    "        last_update = last_update.to(device)\n",
    "        x = x.to(device)\n",
    "        t = t.to(device)\n",
    "        # Relative time of each edge w.r.t. the source node's last memory update.\n",
    "        rel_t = last_update[edge_index[0]] - t\n",
    "        rel_t_enc = self.time_enc(rel_t.to(x.dtype))\n",
    "        edge_attr = torch.cat([rel_t_enc, msg], dim=-1)\n",
    "        x = F.relu(self.conv(x, edge_index, edge_attr))\n",
    "        x = F.relu(self.conv2(x, edge_index, edge_attr))\n",
    "        return x\n",
    "\n",
    "class LinkPredictor(torch.nn.Module):\n",
    "    \"\"\"Predict edge-type logits for a (src, dst) node-embedding pair.\n",
    "\n",
    "    Output dim is train_data.msg.shape[1] - 32: msg is laid out as\n",
    "    [src_node_feature, edge_attr, dst_node_feature] (see the msg-structure\n",
    "    comment in the imports cell); 32 presumably covers the 16+16\n",
    "    node-feature dims, leaving the edge-type one-hot width -- TODO confirm.\n",
    "    \"\"\"\n",
    "    def __init__(self, in_channels):\n",
    "        super(LinkPredictor, self).__init__()\n",
    "        self.lin_src = Linear(in_channels, in_channels*2)\n",
    "        self.lin_dst = Linear(in_channels, in_channels*2)\n",
    "       \n",
    "        # MLP head; note BatchNorm1d requires batches of size > 1 in train mode.\n",
    "        self.lin_seq = nn.Sequential(\n",
    "            Linear(in_channels * 4, in_channels * 8),\n",
    "            torch.nn.BatchNorm1d(in_channels * 8),\n",
    "            torch.nn.Dropout(0.5),\n",
    "            nn.Tanh(),\n",
    "            Linear(in_channels * 8, in_channels * 2),\n",
    "            torch.nn.BatchNorm1d(in_channels * 2),\n",
    "            torch.nn.Dropout(0.5),\n",
    "            nn.Tanh(),\n",
    "            Linear(in_channels * 2, int(in_channels // 2)),\n",
    "            torch.nn.BatchNorm1d(int(in_channels // 2)),\n",
    "            torch.nn.Dropout(0.5),\n",
    "            nn.Tanh(),\n",
    "            Linear(int(in_channels // 2), train_data.msg.shape[1] - 32)\n",
    "        )\n",
    "\n",
    "    def forward(self, z_src, z_dst):\n",
    "        # Concatenate the projected src/dst embeddings, then run the MLP head.\n",
    "        h = torch.cat([self.lin_src(z_src) , self.lin_dst(z_dst)],dim=-1)      \n",
    "         \n",
    "        h = self.lin_seq (h)\n",
    "        \n",
    "        return h\n",
    "        \n",
    "\n",
    "\n",
    "\n",
    "memory_dim = 100         # node state\n",
    "time_dim = 100\n",
    "embedding_dim = 100      # edge embedding\n",
    "\n",
    "memory = TGNMemory(\n",
    "    max_node_num,\n",
    "    train_data.msg.size(-1),\n",
    "    memory_dim,\n",
    "    time_dim,\n",
    "    message_module=IdentityMessage(train_data.msg.size(-1), memory_dim, time_dim),\n",
    "    aggregator_module=LastAggregator(),\n",
    ").to(device)\n",
    "\n",
    "gnn = GraphAttentionEmbedding(\n",
    "    in_channels=memory_dim,\n",
    "    out_channels=embedding_dim,\n",
    "    msg_dim=train_data.msg.size(-1),\n",
    "    time_enc=memory.time_enc,\n",
    ").to(device)\n",
    "\n",
    "link_pred = LinkPredictor(in_channels=embedding_dim).to(device)\n",
    "\n",
    "# Use list concatenation (not set union) so parameter ordering is\n",
    "# deterministic across runs -- important for the reproducibility goal above.\n",
    "optimizer = torch.optim.Adam(\n",
    "    list(memory.parameters()) + list(gnn.parameters())\n",
    "    + list(link_pred.parameters()), lr=0.00005, eps=1e-08,weight_decay=0.01)\n",
    "\n",
    "\n",
    "# scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.1)\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "\n",
    "# Helper vector to map global node indices to local ones.\n",
    "assoc = torch.empty(max_node_num, dtype=torch.long, device=device)\n",
    "\n",
    "saved_nodes=set()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "BATCH=1024\n",
    "def train(train_data):\n",
    "    \"\"\"Run one training epoch over `train_data` (a TemporalData graph).\n",
    "\n",
    "    Jointly trains memory/gnn/link_pred with CrossEntropyLoss on the\n",
    "    edge-type label extracted from each event message. Returns the mean\n",
    "    loss per event.\n",
    "    \"\"\"\n",
    "\n",
    "    \n",
    "    memory.train()\n",
    "    gnn.train()\n",
    "    link_pred.train()\n",
    "\n",
    "    memory.reset_state()  # Start with a fresh memory.\n",
    "    neighbor_loader.reset_state()  # Start with an empty graph.\n",
    "    saved_nodes=set()\n",
    "\n",
    "    total_loss = 0\n",
    "    \n",
    "#     print(\"train_before_stage_data:\",train_data)\n",
    "    for batch in train_data.seq_batches(batch_size=BATCH):\n",
    "        optimizer.zero_grad()\n",
    "\n",
    "        src, pos_dst, t, msg = batch.src, batch.dst, batch.t, batch.msg        \n",
    "        \n",
    "        n_id = torch.cat([src, pos_dst]).unique()\n",
    "#         n_id = torch.cat([src, pos_dst, neg_src, neg_dst]).unique()\n",
    "        n_id, edge_index, e_id = neighbor_loader(n_id)\n",
    "        # Map global node ids to positions in the local n_id tensor.\n",
    "        assoc[n_id] = torch.arange(n_id.size(0), device=device)\n",
    "\n",
    "        # Get updated memory of all nodes involved in the computation.\n",
    "        z, last_update = memory(n_id)\n",
    "      \n",
    "        z = gnn(z, last_update, edge_index, train_data.t[e_id], train_data.msg[e_id])\n",
    "        \n",
    "        pos_out = link_pred(z[assoc[src]], z[assoc[pos_dst]])       \n",
    "\n",
    "        y_pred = torch.cat([pos_out], dim=0)\n",
    "        \n",
    "#         y_true = torch.cat([torch.zeros(pos_out.size(0),1),torch.ones(neg_out.size(0),1)], dim=0)\n",
    "        # Ground-truth edge type: position of the 1 in the one-hot slice of msg\n",
    "        # (tensor_find is 1-based, hence the -1).\n",
    "        y_true=[]\n",
    "        for m in msg:\n",
    "            l=tensor_find(m[16:-16],1)-1\n",
    "            y_true.append(l)           \n",
    "          \n",
    "        y_true = torch.tensor(y_true).to(device=device)\n",
    "        y_true=y_true.reshape(-1).to(torch.long).to(device=device)\n",
    "        \n",
    "        loss = criterion(y_pred, y_true)\n",
    "        \n",
    "#         loss = criterion(pos_out, torch.ones_like(pos_out))\n",
    "#         loss += criterion(neg_out, torch.zeros_like(neg_out))\n",
    "\n",
    "        # Update memory and neighbor loader with ground-truth state.\n",
    "        memory.update_state(src, pos_dst, t, msg)\n",
    "        neighbor_loader.insert(src, pos_dst)\n",
    "        \n",
    "#         for i in range(len(src)):\n",
    "#             saved_nodes.add(int(src[i]))\n",
    "#             saved_nodes.add(int(pos_dst[i]))\n",
    "\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "        # Detach memory so gradients do not flow across batches.\n",
    "        memory.detach()\n",
    "#         print(z.shape)\n",
    "        total_loss += float(loss) * batch.num_events\n",
    "#     print(\"trained_stage_data:\",train_data)\n",
    "    return total_loss / train_data.num_events\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "train_graphs=[graph_5_8, graph_5_9, graph_5_11]\n",
    "\n",
    "# Train for 30 epochs, cycling through the daily graphs within each epoch.\n",
    "for epoch in tqdm(range(1, 31)):\n",
    "    for g in train_graphs:\n",
    "        loss = train(g)\n",
    "        print(f'  Epoch: {epoch:02d}, Loss: {loss:.4f}')\n",
    "#     scheduler.step()\n",
    "model=[memory,gnn, link_pred,neighbor_loader]\n",
    "# os.makedirs is portable and shell-free (was: os.system(\"mkdir -p ./models/\")).\n",
    "os.makedirs(\"./models/\", exist_ok=True)\n",
    "torch.save(model,\"./models/models.pt\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Test"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "import time \n",
    "\n",
    "@torch.no_grad()\n",
    "def test_day_new(inference_data,path):\n",
    "    \"\"\"Evaluate `inference_data` and dump per-edge losses to `path`.\n",
    "\n",
    "    Replays the temporal graph in BATCH-sized windows, computes a\n",
    "    cross-entropy loss per edge, and every ~15 minutes of graph time writes\n",
    "    a checkpoint file '<time_interval>.txt' listing edges sorted by loss\n",
    "    (descending). Returns a dict of per-interval summary stats.\n",
    "    \"\"\"\n",
    "    if os.path.exists(path):\n",
    "        pass\n",
    "    else:\n",
    "        os.mkdir(path)\n",
    "    \n",
    "    memory.eval()\n",
    "    gnn.eval()\n",
    "    link_pred.eval()\n",
    "    \n",
    "    memory.reset_state()  # Start with a fresh memory. \n",
    "    neighbor_loader.reset_state()  # Start with an empty graph.\n",
    "    \n",
    "    time_with_loss={}\n",
    "    total_loss = 0    \n",
    "    edge_list=[]\n",
    "    \n",
    "    unique_nodes=torch.tensor([]).to(device=device)\n",
    "    total_edges=0\n",
    "\n",
    "\n",
    "    start_time=inference_data.t[0]\n",
    "    event_count=0\n",
    "    \n",
    "    pos_o=[]\n",
    "    \n",
    "    loss_list=[]\n",
    "    \n",
    "\n",
    "    print(\"after merge:\",inference_data)\n",
    "    \n",
    "    # Record the running time to evaluate the performance\n",
    "    start = time.perf_counter()\n",
    "\n",
    "    for batch in inference_data.seq_batches(batch_size=BATCH):\n",
    "        \n",
    "        src, pos_dst, t, msg = batch.src, batch.dst, batch.t, batch.msg\n",
    "        unique_nodes=torch.cat([unique_nodes,src,pos_dst]).unique()\n",
    "        # NOTE(review): this overcounts on the final partial batch;\n",
    "        # len(batch.src) would be exact.\n",
    "        total_edges+=BATCH\n",
    "        \n",
    "       \n",
    "        n_id = torch.cat([src, pos_dst]).unique()       \n",
    "        n_id, edge_index, e_id = neighbor_loader(n_id)\n",
    "        assoc[n_id] = torch.arange(n_id.size(0), device=device)\n",
    "\n",
    "        z, last_update = memory(n_id)\n",
    "        z = gnn(z, last_update, edge_index, inference_data.t[e_id], inference_data.msg[e_id])\n",
    "\n",
    "        pos_out = link_pred(z[assoc[src]], z[assoc[pos_dst]])\n",
    "        \n",
    "        pos_o.append(pos_out)\n",
    "        y_pred = torch.cat([pos_out], dim=0)\n",
    "#         y_true = torch.cat(\n",
    "#             [torch.ones(pos_out.size(0))], dim=0).to(torch.long)     \n",
    "#         y_true=y_true.reshape(-1).to(torch.long)\n",
    "\n",
    "        # Ground-truth edge type from the one-hot slice of msg (1-based, hence -1).\n",
    "        y_true=[]\n",
    "        for m in msg:\n",
    "            l=tensor_find(m[16:-16],1)-1\n",
    "            y_true.append(l) \n",
    "        y_true = torch.tensor(y_true).to(device=device)\n",
    "        y_true=y_true.reshape(-1).to(torch.long).to(device=device)\n",
    "\n",
    "        # Only consider edges that haven't been correctly predicted.\n",
    "        # For benign graphs, behavior patterns are similar, so their losses are small.\n",
    "        # For anomalous behaviors, some patterns may not have been seen before, so the\n",
    "        # predicted probability of those edges is low and their losses are high.\n",
    "        loss = criterion(y_pred, y_true)\n",
    "\n",
    "        total_loss += float(loss) * batch.num_events\n",
    "     \n",
    "        \n",
    "        # update the edges in the batch to the memory and neighbor_loader\n",
    "        memory.update_state(src, pos_dst, t, msg)\n",
    "        neighbor_loader.insert(src, pos_dst)\n",
    "        \n",
    "        # compute the loss for each edge\n",
    "        each_edge_loss= cal_pos_edges_loss_multiclass(pos_out,y_true)\n",
    "        \n",
    "        for i in range(len(pos_out)):\n",
    "            srcnode=int(src[i])\n",
    "            dstnode=int(pos_dst[i])  \n",
    "            \n",
    "            srcmsg=str(nodeid2msg[srcnode]) \n",
    "            dstmsg=str(nodeid2msg[dstnode])\n",
    "            t_var=int(t[i])\n",
    "            # 1-based edge-type index, resolved to its name via rel2id.\n",
    "            edgeindex=tensor_find(msg[i][16:-16],1)   \n",
    "            edge_type=rel2id[edgeindex]\n",
    "            loss=each_edge_loss[i]    \n",
    "\n",
    "            temp_dic={}\n",
    "            temp_dic['loss']=float(loss)\n",
    "            temp_dic['srcnode']=srcnode\n",
    "            temp_dic['dstnode']=dstnode\n",
    "            temp_dic['srcmsg']=srcmsg\n",
    "            temp_dic['dstmsg']=dstmsg\n",
    "            temp_dic['edge_type']=edge_type\n",
    "            temp_dic['time']=t_var\n",
    "            \n",
    "#             if \"netflow\" in srcmsg or \"netflow\" in dstmsg:\n",
    "#                 temp_dic['loss']=0\n",
    "            edge_list.append(temp_dic)\n",
    "        \n",
    "        event_count+=len(batch.src)\n",
    "        # Checkpoint every 15 minutes of graph time (60e9 ns * 15).\n",
    "        if t[-1]>start_time+60000000000*15:\n",
    "            # Here is a checkpoint, which records all edge losses in the current time window\n",
    "#             loss=total_loss/event_count\n",
    "            time_interval=ns_time_to_datetime_US(start_time)+\"~\"+ns_time_to_datetime_US(t[-1])\n",
    "\n",
    "            end = time.perf_counter()\n",
    "            # NOTE(review): `loss` here is still the last per-edge loss tensor from the\n",
    "            # loop above, not the window average computed further down -- confirm intent.\n",
    "            time_with_loss[time_interval]={'loss':loss,\n",
    "                                \n",
    "                                          'nodes_count':len(unique_nodes),\n",
    "                                          'total_edges':total_edges,\n",
    "                                          'costed_time':(end-start)}\n",
    "            \n",
    "            \n",
    "            log=open(path+\"/\"+time_interval+\".txt\",'w')\n",
    "            \n",
    "            # Reuses `loss` as an accumulator: sums per-edge losses for the window.\n",
    "            for e in edge_list: \n",
    "#                 temp_key=e['srcmsg']+e['dstmsg']+e['edge_type']\n",
    "#                 if temp_key in train_edge_set:      \n",
    "# #                     e['loss']=(e['loss']-train_edge_set[temp_key]) if e['loss']>=train_edge_set[temp_key] else 0  \n",
    "# #                     e['loss']=abs(e['loss']-train_edge_set[temp_key])\n",
    "                    \n",
    "#                     e['modified']=True\n",
    "#                 else:\n",
    "#                     e['modified']=False\n",
    "                loss+=e['loss']\n",
    "\n",
    "            loss=loss/event_count   \n",
    "            print(f'Time: {time_interval}, Loss: {loss:.4f}, Nodes_count: {len(unique_nodes)}, Cost Time: {(end-start):.2f}s')\n",
    "            edge_list = sorted(edge_list, key=lambda x:x['loss'],reverse=True)   \n",
    "            for e in edge_list: \n",
    "                log.write(str(e))\n",
    "                log.write(\"\\n\") \n",
    "            event_count=0\n",
    "            total_loss=0\n",
    "            loss=0\n",
    "            start_time=t[-1]\n",
    "            log.close()\n",
    "            edge_list.clear()\n",
    "            \n",
    " \n",
    "    return time_with_loss\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the trained model components; map_location keeps this CPU-safe.\n",
    "model=torch.load(\"./models/models.pt\", map_location=device)\n",
    "memory,gnn, link_pred,neighbor_loader=model\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the evaluation-day graphs; map_location=device keeps loading CPU-safe\n",
    "# (consistent with the model load above).\n",
    "graph_5_12=torch.load(\"./train_graphs/graph_5_12.TemporalData.simple\", map_location=device).to(device=device)\n",
    "graph_5_14=torch.load(\"./train_graphs/graph_5_14.TemporalData.simple\", map_location=device).to(device=device)\n",
    "graph_5_15=torch.load(\"./train_graphs/graph_5_15.TemporalData.simple\", map_location=device).to(device=device)\n",
    "graph_5_17=torch.load(\"./train_graphs/graph_5_17.TemporalData.simple\", map_location=device).to(device=device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[6060013], msg=[6060013, 42], src=[6060013], t=[6060013])\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/yinyuanl/anaconda3/envs/kairos/lib/python3.9/site-packages/torch_geometric/nn/conv/transformer_conv.py:211: UserWarning: operator() profile_node %28 : int[] = prim::profile_ivalue(%size.4)\n",
      " does not have profile information (Triggered internally at /opt/conda/conda-bld/pytorch_1670525539683/work/torch/csrc/jit/codegen/cuda/graph_fuser.cpp:105.)\n",
      "  alpha = softmax(alpha, index, ptr, size_i)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Time: 2019-05-08 00:00:00.152000000~2019-05-08 00:16:37.159000000, Loss: 0.7120, Nodes_count: 1027, Cost Time: 7.89s\n",
      "Time: 2019-05-08 00:16:37.159000000~2019-05-08 00:32:09.988000000, Loss: 0.5363, Nodes_count: 1137, Cost Time: 21.50s\n",
      "Time: 2019-05-08 00:32:09.988000000~2019-05-08 00:47:11.359000000, Loss: 0.5497, Nodes_count: 1192, Cost Time: 26.66s\n",
      "Time: 2019-05-08 00:47:11.359000000~2019-05-08 01:02:12.559000000, Loss: 0.4982, Nodes_count: 1247, Cost Time: 42.39s\n",
      "Time: 2019-05-08 01:02:12.559000000~2019-05-08 01:17:13.977000000, Loss: 0.5899, Nodes_count: 1300, Cost Time: 47.65s\n",
      "Time: 2019-05-08 01:17:13.977000000~2019-05-08 01:32:15.057000000, Loss: 0.5042, Nodes_count: 1367, Cost Time: 63.72s\n",
      "Time: 2019-05-08 01:32:15.057000000~2019-05-08 01:47:16.557000000, Loss: 0.5488, Nodes_count: 1436, Cost Time: 68.68s\n",
      "Time: 2019-05-08 01:47:16.557000000~2019-05-08 02:02:17.924000000, Loss: 0.5127, Nodes_count: 1505, Cost Time: 85.88s\n",
      "Time: 2019-05-08 02:02:17.924000000~2019-05-08 02:17:19.367000000, Loss: 0.6190, Nodes_count: 1559, Cost Time: 91.01s\n",
      "Time: 2019-05-08 02:17:19.367000000~2019-05-08 02:32:20.360000000, Loss: 0.5169, Nodes_count: 1625, Cost Time: 107.65s\n",
      "Time: 2019-05-08 02:32:20.360000000~2019-05-08 02:47:21.772000000, Loss: 0.6250, Nodes_count: 1678, Cost Time: 112.10s\n",
      "Time: 2019-05-08 02:47:21.772000000~2019-05-08 03:02:23.199000000, Loss: 0.5032, Nodes_count: 1764, Cost Time: 128.41s\n",
      "Time: 2019-05-08 03:02:23.199000000~2019-05-08 03:17:24.642000000, Loss: 0.5397, Nodes_count: 1830, Cost Time: 133.42s\n",
      "Time: 2019-05-08 03:17:24.642000000~2019-05-08 03:32:25.914000000, Loss: 0.5436, Nodes_count: 1891, Cost Time: 148.60s\n",
      "Time: 2019-05-08 03:32:25.914000000~2019-05-08 03:47:27.431000000, Loss: 0.6217, Nodes_count: 1959, Cost Time: 153.18s\n",
      "Time: 2019-05-08 03:47:27.431000000~2019-05-08 04:02:28.619000000, Loss: 0.5314, Nodes_count: 2029, Cost Time: 167.90s\n",
      "Time: 2019-05-08 04:02:28.619000000~2019-05-08 04:17:30.164000000, Loss: 0.5825, Nodes_count: 2090, Cost Time: 173.16s\n",
      "Time: 2019-05-08 04:17:30.164000000~2019-05-08 04:32:31.199000000, Loss: 0.5123, Nodes_count: 2147, Cost Time: 188.03s\n",
      "Time: 2019-05-08 04:32:31.199000000~2019-05-08 04:47:32.588000000, Loss: 0.5878, Nodes_count: 2209, Cost Time: 192.81s\n",
      "Time: 2019-05-08 04:47:32.588000000~2019-05-08 05:02:34.051000000, Loss: 0.5085, Nodes_count: 2267, Cost Time: 207.41s\n",
      "Time: 2019-05-08 05:02:34.051000000~2019-05-08 05:17:35.498000000, Loss: 0.5552, Nodes_count: 2325, Cost Time: 212.72s\n",
      "Time: 2019-05-08 05:17:35.498000000~2019-05-08 05:32:36.280000000, Loss: 0.5121, Nodes_count: 2380, Cost Time: 228.35s\n",
      "Time: 2019-05-08 05:32:36.280000000~2019-05-08 05:47:37.437000000, Loss: 0.5543, Nodes_count: 2447, Cost Time: 233.47s\n",
      "Time: 2019-05-08 05:47:37.437000000~2019-05-08 06:02:38.450000000, Loss: 0.5002, Nodes_count: 2505, Cost Time: 249.80s\n",
      "Time: 2019-05-08 06:02:38.450000000~2019-05-08 06:17:39.817000000, Loss: 0.5614, Nodes_count: 2554, Cost Time: 254.44s\n",
      "Time: 2019-05-08 06:17:39.817000000~2019-05-08 06:32:40.809000000, Loss: 0.5055, Nodes_count: 2611, Cost Time: 270.63s\n",
      "Time: 2019-05-08 06:32:40.809000000~2019-05-08 06:47:42.207000000, Loss: 0.5480, Nodes_count: 2671, Cost Time: 275.61s\n",
      "Time: 2019-05-08 06:47:42.207000000~2019-05-08 07:02:43.353000000, Loss: 0.5189, Nodes_count: 2734, Cost Time: 293.47s\n",
      "Time: 2019-05-08 07:02:43.353000000~2019-05-08 07:17:44.680000000, Loss: 0.6432, Nodes_count: 2790, Cost Time: 298.30s\n",
      "Time: 2019-05-08 07:17:44.680000000~2019-05-08 07:32:46.096000000, Loss: 0.5025, Nodes_count: 2846, Cost Time: 315.91s\n",
      "Time: 2019-05-08 07:32:46.096000000~2019-05-08 07:47:47.430000000, Loss: 0.5828, Nodes_count: 2908, Cost Time: 320.75s\n",
      "Time: 2019-05-08 07:47:47.430000000~2019-05-08 08:02:48.504000000, Loss: 0.5172, Nodes_count: 2967, Cost Time: 338.92s\n",
      "Time: 2019-05-08 08:02:48.504000000~2019-05-08 08:17:49.939000000, Loss: 0.5612, Nodes_count: 3019, Cost Time: 344.18s\n",
      "Time: 2019-05-08 08:17:49.939000000~2019-05-08 08:32:51.379000000, Loss: 0.5242, Nodes_count: 3091, Cost Time: 362.97s\n",
      "Time: 2019-05-08 08:32:51.379000000~2019-05-08 08:47:52.723000000, Loss: 0.6438, Nodes_count: 3154, Cost Time: 367.70s\n",
      "Time: 2019-05-08 08:47:52.723000000~2019-05-08 09:02:52.868000000, Loss: 0.6147, Nodes_count: 3488, Cost Time: 393.07s\n",
      "Time: 2019-05-08 09:02:52.868000000~2019-05-08 09:17:55.365000000, Loss: 0.6676, Nodes_count: 3940, Cost Time: 428.80s\n",
      "Time: 2019-05-08 09:17:55.365000000~2019-05-08 09:32:56.762000000, Loss: 0.6533, Nodes_count: 4796, Cost Time: 472.15s\n",
      "Time: 2019-05-08 09:32:56.762000000~2019-05-08 09:47:58.228000000, Loss: 0.6485, Nodes_count: 5875, Cost Time: 515.38s\n",
      "Time: 2019-05-08 09:47:58.228000000~2019-05-08 10:02:59.441000000, Loss: 0.6558, Nodes_count: 6831, Cost Time: 569.61s\n",
      "Time: 2019-05-08 10:02:59.441000000~2019-05-08 10:18:00.815000000, Loss: 0.6738, Nodes_count: 7429, Cost Time: 613.29s\n",
      "Time: 2019-05-08 10:18:00.815000000~2019-05-08 10:33:01.812000000, Loss: 0.6243, Nodes_count: 7618, Cost Time: 641.00s\n",
      "Time: 2019-05-08 10:33:01.812000000~2019-05-08 10:48:01.890000000, Loss: 0.6741, Nodes_count: 8107, Cost Time: 666.04s\n",
      "Time: 2019-05-08 10:48:01.890000000~2019-05-08 11:03:02.822000000, Loss: 0.6525, Nodes_count: 8548, Cost Time: 711.63s\n",
      "Time: 2019-05-08 11:03:02.822000000~2019-05-08 11:18:05.745000000, Loss: 0.6755, Nodes_count: 8939, Cost Time: 745.30s\n",
      "Time: 2019-05-08 11:18:05.745000000~2019-05-08 11:33:06.985000000, Loss: 0.6511, Nodes_count: 9428, Cost Time: 790.30s\n",
      "Time: 2019-05-08 11:33:06.985000000~2019-05-08 11:48:07.904000000, Loss: 0.6768, Nodes_count: 10547, Cost Time: 875.33s\n",
      "Time: 2019-05-08 11:48:07.904000000~2019-05-08 12:03:09.841000000, Loss: 0.6526, Nodes_count: 10981, Cost Time: 926.01s\n",
      "Time: 2019-05-08 12:03:09.841000000~2019-05-08 12:18:11.147000000, Loss: 0.6731, Nodes_count: 11659, Cost Time: 981.25s\n",
      "Time: 2019-05-08 12:18:11.147000000~2019-05-08 12:33:12.556000000, Loss: 0.6135, Nodes_count: 11798, Cost Time: 1004.26s\n",
      "Time: 2019-05-08 12:33:12.556000000~2019-05-08 14:41:30.514000000, Loss: 0.6874, Nodes_count: 12036, Cost Time: 1021.14s\n",
      "Time: 2019-05-08 14:41:30.514000000~2019-05-08 14:57:51.443000000, Loss: 0.6264, Nodes_count: 16024, Cost Time: 1060.07s\n",
      "Time: 2019-05-08 14:57:51.443000000~2019-05-08 15:12:56.707000000, Loss: 0.5119, Nodes_count: 16120, Cost Time: 1071.79s\n",
      "Time: 2019-05-08 15:12:56.707000000~2019-05-08 15:27:58.022000000, Loss: 0.6386, Nodes_count: 16545, Cost Time: 1088.50s\n",
      "Time: 2019-05-08 15:27:58.022000000~2019-05-08 15:42:59.413000000, Loss: 0.6688, Nodes_count: 17451, Cost Time: 1117.31s\n",
      "Time: 2019-05-08 15:42:59.413000000~2019-05-08 15:57:59.633000000, Loss: 0.6707, Nodes_count: 18388, Cost Time: 1143.81s\n",
      "Time: 2019-05-08 15:57:59.633000000~2019-05-08 16:13:00.161000000, Loss: 0.6704, Nodes_count: 19771, Cost Time: 1220.39s\n",
      "Time: 2019-05-08 16:13:00.161000000~2019-05-08 16:28:01.949000000, Loss: 0.6683, Nodes_count: 20974, Cost Time: 1274.71s\n",
      "Time: 2019-05-08 16:28:01.949000000~2019-05-08 16:44:13.715000000, Loss: 0.6591, Nodes_count: 21539, Cost Time: 1327.78s\n",
      "Time: 2019-05-08 16:44:13.715000000~2019-05-08 16:59:35.550000000, Loss: 0.5054, Nodes_count: 21586, Cost Time: 1333.35s\n",
      "Time: 2019-05-08 16:59:35.550000000~2019-05-08 17:14:57.292000000, Loss: 0.4869, Nodes_count: 21638, Cost Time: 1346.63s\n",
      "Time: 2019-05-08 17:14:57.292000000~2019-05-08 17:30:16.207000000, Loss: 0.4964, Nodes_count: 21684, Cost Time: 1351.25s\n",
      "Time: 2019-05-08 17:30:16.207000000~2019-05-08 17:45:20.894000000, Loss: 0.4831, Nodes_count: 21738, Cost Time: 1364.54s\n",
      "Time: 2019-05-08 17:45:20.894000000~2019-05-08 18:00:43.599000000, Loss: 0.4947, Nodes_count: 21783, Cost Time: 1369.13s\n",
      "Time: 2019-05-08 18:00:43.599000000~2019-05-08 18:15:50.363000000, Loss: 0.4884, Nodes_count: 21840, Cost Time: 1382.19s\n",
      "Time: 2019-05-08 18:15:50.363000000~2019-05-08 18:31:18.479000000, Loss: 0.5135, Nodes_count: 21884, Cost Time: 1387.19s\n",
      "Time: 2019-05-08 18:31:18.479000000~2019-05-08 18:46:18.819000000, Loss: 0.4740, Nodes_count: 21936, Cost Time: 1399.41s\n",
      "Time: 2019-05-08 18:46:18.819000000~2019-05-08 19:01:46.965000000, Loss: 0.4849, Nodes_count: 21983, Cost Time: 1404.17s\n",
      "Time: 2019-05-08 19:01:46.965000000~2019-05-08 19:16:56.840000000, Loss: 0.4850, Nodes_count: 22028, Cost Time: 1417.88s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Time: 2019-05-08 19:16:56.840000000~2019-05-08 19:32:18.680000000, Loss: 0.5538, Nodes_count: 22079, Cost Time: 1422.78s\n",
      "Time: 2019-05-08 19:32:18.680000000~2019-05-08 19:47:22.439000000, Loss: 0.4772, Nodes_count: 22133, Cost Time: 1434.76s\n",
      "Time: 2019-05-08 19:47:22.439000000~2019-05-08 20:03:20.623000000, Loss: 0.5031, Nodes_count: 22224, Cost Time: 1439.56s\n",
      "Time: 2019-05-08 20:03:20.623000000~2019-05-08 20:18:21.783000000, Loss: 0.4716, Nodes_count: 22271, Cost Time: 1453.34s\n",
      "Time: 2019-05-08 20:18:21.783000000~2019-05-08 20:33:23.285000000, Loss: 0.4788, Nodes_count: 22319, Cost Time: 1458.59s\n",
      "Time: 2019-05-08 20:33:23.285000000~2019-05-08 20:48:24.371000000, Loss: 0.4729, Nodes_count: 22359, Cost Time: 1472.77s\n",
      "Time: 2019-05-08 20:48:24.371000000~2019-05-08 21:03:25.813000000, Loss: 0.4733, Nodes_count: 22405, Cost Time: 1477.88s\n",
      "Time: 2019-05-08 21:03:25.813000000~2019-05-08 21:18:27.042000000, Loss: 0.4760, Nodes_count: 22455, Cost Time: 1491.22s\n",
      "Time: 2019-05-08 21:18:27.042000000~2019-05-08 21:33:28.426000000, Loss: 0.5168, Nodes_count: 22497, Cost Time: 1496.11s\n",
      "Time: 2019-05-08 21:33:28.426000000~2019-05-08 21:48:29.347000000, Loss: 0.4769, Nodes_count: 22541, Cost Time: 1508.34s\n",
      "Time: 2019-05-08 21:48:29.347000000~2019-05-08 22:03:30.689000000, Loss: 0.5028, Nodes_count: 22585, Cost Time: 1513.01s\n",
      "Time: 2019-05-08 22:03:30.689000000~2019-05-08 22:18:32.050000000, Loss: 0.4780, Nodes_count: 22642, Cost Time: 1527.90s\n",
      "Time: 2019-05-08 22:18:32.050000000~2019-05-08 22:33:33.434000000, Loss: 0.4862, Nodes_count: 22685, Cost Time: 1533.07s\n",
      "Time: 2019-05-08 22:33:33.434000000~2019-05-08 22:48:34.655000000, Loss: 0.4894, Nodes_count: 22728, Cost Time: 1546.36s\n",
      "Time: 2019-05-08 22:48:34.655000000~2019-05-08 23:03:35.791000000, Loss: 0.5029, Nodes_count: 22778, Cost Time: 1550.40s\n",
      "Time: 2019-05-08 23:03:35.791000000~2019-05-08 23:18:37.055000000, Loss: 0.4769, Nodes_count: 22815, Cost Time: 1563.97s\n",
      "Time: 2019-05-08 23:18:37.055000000~2019-05-08 23:33:38.465000000, Loss: 0.4968, Nodes_count: 22853, Cost Time: 1568.68s\n",
      "Time: 2019-05-08 23:33:38.465000000~2019-05-08 23:48:39.828000000, Loss: 0.4758, Nodes_count: 22906, Cost Time: 1581.91s\n"
     ]
    }
   ],
   "source": [
    "ans_5_8=test_day_new(graph_5_8,\"graph_5_8\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[8438455], msg=[8438455, 42], src=[8438455], t=[8438455])\n",
      "Time: 2019-05-09 00:00:00.288000000~2019-05-09 00:15:17.459000000, Loss: 0.5977, Nodes_count: 770, Cost Time: 8.28s\n",
      "Time: 2019-05-09 00:15:17.459000000~2019-05-09 00:32:11.493000000, Loss: 0.5366, Nodes_count: 866, Cost Time: 13.16s\n",
      "Time: 2019-05-09 00:32:11.493000000~2019-05-09 00:47:26.637000000, Loss: 0.4846, Nodes_count: 918, Cost Time: 26.56s\n",
      "Time: 2019-05-09 00:47:26.637000000~2019-05-09 01:02:46.300000000, Loss: 0.4879, Nodes_count: 972, Cost Time: 31.10s\n",
      "Time: 2019-05-09 01:02:46.300000000~2019-05-09 01:18:00.466000000, Loss: 0.4838, Nodes_count: 1023, Cost Time: 45.92s\n",
      "Time: 2019-05-09 01:18:00.466000000~2019-05-09 01:33:21.459000000, Loss: 0.4953, Nodes_count: 1073, Cost Time: 50.36s\n",
      "Time: 2019-05-09 01:33:21.459000000~2019-05-09 01:48:44.527000000, Loss: 0.4802, Nodes_count: 1121, Cost Time: 64.78s\n",
      "Time: 2019-05-09 01:48:44.527000000~2019-05-09 02:03:51.619000000, Loss: 0.5013, Nodes_count: 1174, Cost Time: 69.67s\n",
      "Time: 2019-05-09 02:03:51.619000000~2019-05-09 02:18:52.685000000, Loss: 0.4783, Nodes_count: 1229, Cost Time: 84.09s\n",
      "Time: 2019-05-09 02:18:52.685000000~2019-05-09 02:33:54.099000000, Loss: 0.5013, Nodes_count: 1275, Cost Time: 89.03s\n",
      "Time: 2019-05-09 02:33:54.099000000~2019-05-09 02:48:55.402000000, Loss: 0.4777, Nodes_count: 1327, Cost Time: 105.24s\n",
      "Time: 2019-05-09 02:48:55.402000000~2019-05-09 03:03:56.834000000, Loss: 0.4909, Nodes_count: 1374, Cost Time: 110.12s\n",
      "Time: 2019-05-09 03:03:56.834000000~2019-05-09 03:18:57.991000000, Loss: 0.4719, Nodes_count: 1433, Cost Time: 124.39s\n",
      "Time: 2019-05-09 03:18:57.991000000~2019-05-09 03:33:59.556000000, Loss: 0.4899, Nodes_count: 1485, Cost Time: 129.08s\n",
      "Time: 2019-05-09 03:33:59.556000000~2019-05-09 03:49:00.513000000, Loss: 0.4909, Nodes_count: 1539, Cost Time: 143.27s\n",
      "Time: 2019-05-09 03:49:00.513000000~2019-05-09 04:04:01.793000000, Loss: 0.5644, Nodes_count: 1585, Cost Time: 147.81s\n",
      "Time: 2019-05-09 04:04:01.793000000~2019-05-09 04:19:02.809000000, Loss: 0.4745, Nodes_count: 1643, Cost Time: 162.43s\n",
      "Time: 2019-05-09 04:19:02.809000000~2019-05-09 04:34:04.004000000, Loss: 0.4862, Nodes_count: 1695, Cost Time: 167.28s\n",
      "Time: 2019-05-09 04:34:04.004000000~2019-05-09 04:49:05.037000000, Loss: 0.4753, Nodes_count: 1747, Cost Time: 181.76s\n",
      "Time: 2019-05-09 04:49:05.037000000~2019-05-09 05:04:06.492000000, Loss: 0.4931, Nodes_count: 1803, Cost Time: 186.81s\n",
      "Time: 2019-05-09 05:04:06.492000000~2019-05-09 05:19:07.847000000, Loss: 0.4710, Nodes_count: 1856, Cost Time: 203.09s\n",
      "Time: 2019-05-09 05:19:07.847000000~2019-05-09 05:34:09.167000000, Loss: 0.4854, Nodes_count: 1913, Cost Time: 207.89s\n",
      "Time: 2019-05-09 05:34:09.167000000~2019-05-09 05:49:10.431000000, Loss: 0.4832, Nodes_count: 1962, Cost Time: 223.77s\n",
      "Time: 2019-05-09 05:49:10.431000000~2019-05-09 06:04:11.763000000, Loss: 0.4991, Nodes_count: 2019, Cost Time: 228.04s\n",
      "Time: 2019-05-09 06:04:11.763000000~2019-05-09 06:19:13.043000000, Loss: 0.4917, Nodes_count: 2067, Cost Time: 244.04s\n",
      "Time: 2019-05-09 06:19:13.043000000~2019-05-09 06:34:14.383000000, Loss: 0.5018, Nodes_count: 2127, Cost Time: 248.86s\n",
      "Time: 2019-05-09 06:34:14.383000000~2019-05-09 06:49:15.508000000, Loss: 0.4734, Nodes_count: 2179, Cost Time: 264.73s\n",
      "Time: 2019-05-09 06:49:15.508000000~2019-05-09 07:04:16.877000000, Loss: 0.4894, Nodes_count: 2229, Cost Time: 269.66s\n",
      "Time: 2019-05-09 07:04:16.877000000~2019-05-09 07:19:18.257000000, Loss: 0.4750, Nodes_count: 2283, Cost Time: 284.51s\n",
      "Time: 2019-05-09 07:19:18.257000000~2019-05-09 07:34:19.692000000, Loss: 0.4993, Nodes_count: 2338, Cost Time: 289.26s\n",
      "Time: 2019-05-09 07:34:19.692000000~2019-05-09 07:49:20.826000000, Loss: 0.4789, Nodes_count: 2437, Cost Time: 303.32s\n",
      "Time: 2019-05-09 07:49:20.826000000~2019-05-09 08:04:22.250000000, Loss: 0.5361, Nodes_count: 2502, Cost Time: 308.17s\n",
      "Time: 2019-05-09 08:04:22.250000000~2019-05-09 08:19:23.701000000, Loss: 0.4862, Nodes_count: 2617, Cost Time: 323.89s\n",
      "Time: 2019-05-09 08:19:23.701000000~2019-05-09 08:34:25.162000000, Loss: 0.4835, Nodes_count: 2684, Cost Time: 328.75s\n",
      "Time: 2019-05-09 08:34:25.162000000~2019-05-09 08:49:26.327000000, Loss: 0.4726, Nodes_count: 2751, Cost Time: 344.62s\n",
      "Time: 2019-05-09 08:49:26.327000000~2019-05-09 09:04:27.470000000, Loss: 0.6658, Nodes_count: 3131, Cost Time: 349.40s\n",
      "Time: 2019-05-09 09:04:27.470000000~2019-05-09 09:19:28.488000000, Loss: 0.6613, Nodes_count: 4159, Cost Time: 418.16s\n",
      "Time: 2019-05-09 09:19:28.488000000~2019-05-09 09:34:30.103000000, Loss: 0.6842, Nodes_count: 4494, Cost Time: 442.38s\n",
      "Time: 2019-05-09 09:34:30.103000000~2019-05-09 09:49:31.055000000, Loss: 0.6304, Nodes_count: 4834, Cost Time: 478.71s\n",
      "Time: 2019-05-09 09:49:31.055000000~2019-05-09 10:04:32.350000000, Loss: 0.6649, Nodes_count: 5201, Cost Time: 503.21s\n",
      "Time: 2019-05-09 10:04:32.350000000~2019-05-09 10:19:33.657000000, Loss: 0.5582, Nodes_count: 5404, Cost Time: 537.51s\n",
      "Time: 2019-05-09 10:19:33.657000000~2019-05-09 10:34:35.038000000, Loss: 0.6696, Nodes_count: 5761, Cost Time: 557.95s\n",
      "Time: 2019-05-09 10:34:35.038000000~2019-05-09 10:49:35.245000000, Loss: 0.6551, Nodes_count: 6487, Cost Time: 630.48s\n",
      "Time: 2019-05-09 10:49:35.245000000~2019-05-09 11:04:37.266000000, Loss: 0.6701, Nodes_count: 7096, Cost Time: 677.65s\n",
      "Time: 2019-05-09 11:04:37.266000000~2019-05-09 11:19:38.480000000, Loss: 0.6566, Nodes_count: 7837, Cost Time: 753.82s\n",
      "Time: 2019-05-09 11:19:38.480000000~2019-05-09 11:34:40.383000000, Loss: 0.6871, Nodes_count: 8945, Cost Time: 815.33s\n",
      "Time: 2019-05-09 11:34:40.383000000~2019-05-09 11:49:41.715000000, Loss: 0.6262, Nodes_count: 9316, Cost Time: 864.33s\n",
      "Time: 2019-05-09 11:49:41.715000000~2019-05-09 12:04:41.865000000, Loss: 0.6656, Nodes_count: 9773, Cost Time: 897.77s\n",
      "Time: 2019-05-09 12:04:41.865000000~2019-05-09 12:19:44.123000000, Loss: 0.6464, Nodes_count: 10289, Cost Time: 954.79s\n",
      "Time: 2019-05-09 12:19:44.123000000~2019-05-09 12:34:45.579000000, Loss: 0.6560, Nodes_count: 10610, Cost Time: 979.10s\n",
      "Time: 2019-05-09 12:34:45.579000000~2019-05-09 12:49:46.836000000, Loss: 0.6033, Nodes_count: 10852, Cost Time: 1016.85s\n",
      "Time: 2019-05-09 12:49:46.836000000~2019-05-09 13:04:48.051000000, Loss: 0.6807, Nodes_count: 11736, Cost Time: 1060.42s\n",
      "Time: 2019-05-09 13:04:48.051000000~2019-05-09 13:19:49.130000000, Loss: 0.6464, Nodes_count: 12244, Cost Time: 1112.41s\n",
      "Time: 2019-05-09 13:19:49.130000000~2019-05-09 13:34:50.533000000, Loss: 0.6731, Nodes_count: 12853, Cost Time: 1150.21s\n",
      "Time: 2019-05-09 13:34:50.533000000~2019-05-09 13:49:51.969000000, Loss: 0.6269, Nodes_count: 13131, Cost Time: 1185.77s\n",
      "Time: 2019-05-09 13:49:51.969000000~2019-05-09 14:04:53.357000000, Loss: 0.6686, Nodes_count: 13521, Cost Time: 1216.70s\n",
      "Time: 2019-05-09 14:04:53.357000000~2019-05-09 14:19:53.798000000, Loss: 0.6524, Nodes_count: 14116, Cost Time: 1284.82s\n",
      "Time: 2019-05-09 14:19:53.798000000~2019-05-09 14:34:55.765000000, Loss: 0.6572, Nodes_count: 14512, Cost Time: 1306.86s\n",
      "Time: 2019-05-09 14:34:55.765000000~2019-05-09 14:49:56.993000000, Loss: 0.6590, Nodes_count: 15138, Cost Time: 1365.68s\n",
      "Time: 2019-05-09 14:49:56.993000000~2019-05-09 15:04:58.532000000, Loss: 0.6702, Nodes_count: 15780, Cost Time: 1407.81s\n",
      "Time: 2019-05-09 15:04:58.532000000~2019-05-09 15:19:59.522000000, Loss: 0.6479, Nodes_count: 16196, Cost Time: 1456.10s\n",
      "Time: 2019-05-09 15:19:59.522000000~2019-05-09 15:35:00.736000000, Loss: 0.6455, Nodes_count: 16400, Cost Time: 1474.31s\n",
      "Time: 2019-05-09 15:35:00.736000000~2019-05-09 15:50:01.662000000, Loss: 0.5923, Nodes_count: 16628, Cost Time: 1500.25s\n",
      "Time: 2019-05-09 15:50:01.662000000~2019-05-09 16:05:02.909000000, Loss: 0.6634, Nodes_count: 16891, Cost Time: 1520.36s\n",
      "Time: 2019-05-09 16:05:02.909000000~2019-05-09 16:20:03.714000000, Loss: 0.6488, Nodes_count: 17254, Cost Time: 1562.40s\n",
      "Time: 2019-05-09 16:20:03.714000000~2019-05-09 16:35:04.777000000, Loss: 0.6588, Nodes_count: 17536, Cost Time: 1585.14s\n",
      "Time: 2019-05-09 16:35:04.777000000~2019-05-09 16:50:06.007000000, Loss: 0.6090, Nodes_count: 17746, Cost Time: 1611.36s\n",
      "Time: 2019-05-09 16:50:06.007000000~2019-05-09 17:05:07.553000000, Loss: 0.6567, Nodes_count: 18074, Cost Time: 1636.38s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Time: 2019-05-09 17:05:07.553000000~2019-05-09 17:20:08.838000000, Loss: 0.5278, Nodes_count: 18160, Cost Time: 1656.11s\n",
      "Time: 2019-05-09 17:20:08.838000000~2019-05-09 17:35:10.111000000, Loss: 0.6546, Nodes_count: 18417, Cost Time: 1677.69s\n",
      "Time: 2019-05-09 17:35:10.111000000~2019-05-09 17:50:11.084000000, Loss: 0.6550, Nodes_count: 18861, Cost Time: 1730.69s\n",
      "Time: 2019-05-09 17:50:11.084000000~2019-05-09 18:05:12.449000000, Loss: 0.6631, Nodes_count: 19261, Cost Time: 1763.60s\n",
      "Time: 2019-05-09 18:05:12.449000000~2019-05-09 18:20:13.563000000, Loss: 0.6323, Nodes_count: 19578, Cost Time: 1810.49s\n",
      "Time: 2019-05-09 18:20:13.563000000~2019-05-09 18:35:15.440000000, Loss: 0.6724, Nodes_count: 20387, Cost Time: 1874.05s\n",
      "Time: 2019-05-09 18:35:15.440000000~2019-05-09 18:50:17.094000000, Loss: 0.6344, Nodes_count: 20725, Cost Time: 1914.45s\n",
      "Time: 2019-05-09 18:50:17.094000000~2019-05-09 19:05:19.380000000, Loss: 0.6668, Nodes_count: 21335, Cost Time: 1966.12s\n",
      "Time: 2019-05-09 19:05:19.380000000~2019-05-09 19:22:37.966000000, Loss: 0.6541, Nodes_count: 24956, Cost Time: 2022.06s\n",
      "Time: 2019-05-09 19:22:37.966000000~2019-05-09 19:38:38.334000000, Loss: 0.5238, Nodes_count: 24963, Cost Time: 2023.14s\n",
      "Time: 2019-05-09 19:38:38.334000000~2019-05-09 20:03:40.088000000, Loss: 0.5809, Nodes_count: 24977, Cost Time: 2023.48s\n",
      "Time: 2019-05-09 20:03:40.088000000~2019-05-09 20:45:19.328000000, Loss: 0.5292, Nodes_count: 25006, Cost Time: 2023.57s\n",
      "Time: 2019-05-09 20:45:19.328000000~2019-05-09 21:26:42.422000000, Loss: 0.4962, Nodes_count: 25032, Cost Time: 2023.64s\n",
      "Time: 2019-05-09 21:26:42.422000000~2019-05-09 22:08:19.444000000, Loss: 0.5183, Nodes_count: 25060, Cost Time: 2023.72s\n",
      "Time: 2019-05-09 22:08:19.444000000~2019-05-09 22:50:07.752000000, Loss: 0.5272, Nodes_count: 25082, Cost Time: 2023.80s\n",
      "Time: 2019-05-09 22:50:07.752000000~2019-05-09 23:31:44.071000000, Loss: 0.5095, Nodes_count: 25111, Cost Time: 2023.88s\n",
      "Time: 2019-05-09 23:31:44.071000000~2019-05-09 23:59:47.915000000, Loss: 0.4901, Nodes_count: 25127, Cost Time: 2023.95s\n"
     ]
    }
   ],
   "source": [
    "ans_5_9=test_day_new(graph_5_9,\"graph_5_9\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[34775], msg=[34775, 42], src=[34775], t=[34775])\n",
      "Time: 2019-05-11 00:00:10.521000000~2019-05-11 00:41:33.462000000, Loss: 2.0373, Nodes_count: 34, Cost Time: 0.04s\n",
      "Time: 2019-05-11 00:41:33.462000000~2019-05-11 01:23:21.609000000, Loss: 1.5618, Nodes_count: 58, Cost Time: 0.11s\n",
      "Time: 2019-05-11 01:23:21.609000000~2019-05-11 02:04:35.648000000, Loss: 0.6643, Nodes_count: 85, Cost Time: 0.17s\n",
      "Time: 2019-05-11 02:04:35.648000000~2019-05-11 02:46:01.387000000, Loss: 0.5496, Nodes_count: 119, Cost Time: 0.24s\n",
      "Time: 2019-05-11 02:46:01.387000000~2019-05-11 03:28:01.426000000, Loss: 0.5156, Nodes_count: 140, Cost Time: 0.30s\n",
      "Time: 2019-05-11 03:28:01.426000000~2019-05-11 04:09:37.675000000, Loss: 0.5406, Nodes_count: 164, Cost Time: 0.36s\n",
      "Time: 2019-05-11 04:09:37.675000000~2019-05-11 04:50:41.283000000, Loss: 0.5299, Nodes_count: 202, Cost Time: 0.43s\n",
      "Time: 2019-05-11 04:50:41.283000000~2019-05-11 05:32:05.364000000, Loss: 0.5387, Nodes_count: 233, Cost Time: 0.50s\n",
      "Time: 2019-05-11 05:32:05.364000000~2019-05-11 06:14:41.361000000, Loss: 0.5769, Nodes_count: 240, Cost Time: 0.56s\n",
      "Time: 2019-05-11 06:14:41.361000000~2019-05-11 06:57:07.689000000, Loss: 0.5639, Nodes_count: 240, Cost Time: 0.62s\n",
      "Time: 2019-05-11 06:57:07.689000000~2019-05-11 07:39:46.260000000, Loss: 0.5234, Nodes_count: 240, Cost Time: 0.68s\n",
      "Time: 2019-05-11 07:39:46.260000000~2019-05-11 08:22:45.695000000, Loss: 0.5043, Nodes_count: 240, Cost Time: 0.74s\n",
      "Time: 2019-05-11 08:22:45.695000000~2019-05-11 09:05:12.021000000, Loss: 0.5214, Nodes_count: 240, Cost Time: 0.80s\n",
      "Time: 2019-05-11 09:05:12.021000000~2019-05-11 09:47:50.155000000, Loss: 0.5026, Nodes_count: 240, Cost Time: 0.86s\n",
      "Time: 2019-05-11 09:47:50.155000000~2019-05-11 10:30:49.417000000, Loss: 0.4910, Nodes_count: 240, Cost Time: 0.93s\n",
      "Time: 2019-05-11 10:30:49.417000000~2019-05-11 11:13:16.311000000, Loss: 0.4981, Nodes_count: 240, Cost Time: 0.99s\n",
      "Time: 2019-05-11 11:13:16.311000000~2019-05-11 11:55:54.099000000, Loss: 0.4953, Nodes_count: 240, Cost Time: 1.05s\n",
      "Time: 2019-05-11 11:55:54.099000000~2019-05-11 12:38:53.278000000, Loss: 0.4815, Nodes_count: 240, Cost Time: 1.10s\n",
      "Time: 2019-05-11 12:38:53.278000000~2019-05-11 13:21:20.026000000, Loss: 0.5004, Nodes_count: 240, Cost Time: 1.16s\n",
      "Time: 2019-05-11 13:21:20.026000000~2019-05-11 14:03:58.258000000, Loss: 0.5026, Nodes_count: 240, Cost Time: 1.22s\n",
      "Time: 2019-05-11 14:03:58.258000000~2019-05-11 14:46:57.303000000, Loss: 0.4954, Nodes_count: 240, Cost Time: 1.28s\n",
      "Time: 2019-05-11 14:46:57.303000000~2019-05-11 15:29:22.727000000, Loss: 0.5256, Nodes_count: 240, Cost Time: 1.33s\n",
      "Time: 2019-05-11 15:29:22.727000000~2019-05-11 16:12:02.210000000, Loss: 0.5257, Nodes_count: 240, Cost Time: 1.39s\n",
      "Time: 2019-05-11 16:12:02.210000000~2019-05-11 16:55:01.313000000, Loss: 0.4945, Nodes_count: 240, Cost Time: 1.46s\n",
      "Time: 2019-05-11 16:55:01.313000000~2019-05-11 17:37:22.913000000, Loss: 0.4953, Nodes_count: 240, Cost Time: 1.52s\n",
      "Time: 2019-05-11 17:37:22.913000000~2019-05-11 18:20:06.326000000, Loss: 0.5040, Nodes_count: 240, Cost Time: 1.58s\n",
      "Time: 2019-05-11 18:20:06.326000000~2019-05-11 19:03:05.351000000, Loss: 0.4911, Nodes_count: 240, Cost Time: 1.63s\n",
      "Time: 2019-05-11 19:03:05.351000000~2019-05-11 19:45:23.083000000, Loss: 0.5085, Nodes_count: 240, Cost Time: 1.69s\n",
      "Time: 2019-05-11 19:45:23.083000000~2019-05-11 20:28:10.828000000, Loss: 0.5020, Nodes_count: 240, Cost Time: 1.75s\n",
      "Time: 2019-05-11 20:28:10.828000000~2019-05-11 21:11:08.839000000, Loss: 0.4838, Nodes_count: 240, Cost Time: 1.81s\n",
      "Time: 2019-05-11 21:11:08.839000000~2019-05-11 21:53:23.245000000, Loss: 0.4973, Nodes_count: 240, Cost Time: 1.88s\n",
      "Time: 2019-05-11 21:53:23.245000000~2019-05-11 22:36:14.939000000, Loss: 0.5102, Nodes_count: 240, Cost Time: 1.93s\n",
      "Time: 2019-05-11 22:36:14.939000000~2019-05-11 23:19:12.771000000, Loss: 0.4887, Nodes_count: 240, Cost Time: 1.99s\n",
      "Time: 2019-05-11 23:19:12.771000000~2019-05-11 23:59:39.175000000, Loss: 0.5085, Nodes_count: 240, Cost Time: 2.05s\n"
     ]
    }
   ],
   "source": [
    "ans_5_11=test_day_new(graph_5_11,\"graph_5_11\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[34775], msg=[34775, 42], src=[34775], t=[34775])\n",
      "Time: 2019-05-11 00:00:10.521000000~2019-05-11 00:41:33.462000000, Loss: 2.0373, Nodes_count: 34, Cost Time: 0.05s\n",
      "Time: 2019-05-11 00:41:33.462000000~2019-05-11 01:23:21.609000000, Loss: 1.5618, Nodes_count: 58, Cost Time: 0.12s\n",
      "Time: 2019-05-11 01:23:21.609000000~2019-05-11 02:04:35.648000000, Loss: 0.6643, Nodes_count: 85, Cost Time: 0.19s\n",
      "Time: 2019-05-11 02:04:35.648000000~2019-05-11 02:46:01.387000000, Loss: 0.5496, Nodes_count: 119, Cost Time: 0.26s\n",
      "Time: 2019-05-11 02:46:01.387000000~2019-05-11 03:28:01.426000000, Loss: 0.5156, Nodes_count: 140, Cost Time: 0.32s\n",
      "Time: 2019-05-11 03:28:01.426000000~2019-05-11 04:09:37.675000000, Loss: 0.5406, Nodes_count: 164, Cost Time: 0.39s\n",
      "Time: 2019-05-11 04:09:37.675000000~2019-05-11 04:50:41.283000000, Loss: 0.5299, Nodes_count: 202, Cost Time: 0.48s\n",
      "Time: 2019-05-11 04:50:41.283000000~2019-05-11 05:32:05.364000000, Loss: 0.5387, Nodes_count: 233, Cost Time: 0.56s\n",
      "Time: 2019-05-11 05:32:05.364000000~2019-05-11 06:14:41.361000000, Loss: 0.5769, Nodes_count: 240, Cost Time: 0.63s\n",
      "Time: 2019-05-11 06:14:41.361000000~2019-05-11 06:57:07.689000000, Loss: 0.5639, Nodes_count: 240, Cost Time: 0.68s\n",
      "Time: 2019-05-11 06:57:07.689000000~2019-05-11 07:39:46.260000000, Loss: 0.5234, Nodes_count: 240, Cost Time: 0.74s\n",
      "Time: 2019-05-11 07:39:46.260000000~2019-05-11 08:22:45.695000000, Loss: 0.5043, Nodes_count: 240, Cost Time: 0.80s\n",
      "Time: 2019-05-11 08:22:45.695000000~2019-05-11 09:05:12.021000000, Loss: 0.5214, Nodes_count: 240, Cost Time: 0.86s\n",
      "Time: 2019-05-11 09:05:12.021000000~2019-05-11 09:47:50.155000000, Loss: 0.5026, Nodes_count: 240, Cost Time: 0.93s\n",
      "Time: 2019-05-11 09:47:50.155000000~2019-05-11 10:30:49.417000000, Loss: 0.4910, Nodes_count: 240, Cost Time: 0.98s\n",
      "Time: 2019-05-11 10:30:49.417000000~2019-05-11 11:13:16.311000000, Loss: 0.4981, Nodes_count: 240, Cost Time: 1.04s\n",
      "Time: 2019-05-11 11:13:16.311000000~2019-05-11 11:55:54.099000000, Loss: 0.4953, Nodes_count: 240, Cost Time: 1.10s\n",
      "Time: 2019-05-11 11:55:54.099000000~2019-05-11 12:38:53.278000000, Loss: 0.4815, Nodes_count: 240, Cost Time: 1.16s\n",
      "Time: 2019-05-11 12:38:53.278000000~2019-05-11 13:21:20.026000000, Loss: 0.5004, Nodes_count: 240, Cost Time: 1.22s\n",
      "Time: 2019-05-11 13:21:20.026000000~2019-05-11 14:03:58.258000000, Loss: 0.5026, Nodes_count: 240, Cost Time: 1.29s\n",
      "Time: 2019-05-11 14:03:58.258000000~2019-05-11 14:46:57.303000000, Loss: 0.4954, Nodes_count: 240, Cost Time: 1.35s\n",
      "Time: 2019-05-11 14:46:57.303000000~2019-05-11 15:29:22.727000000, Loss: 0.5256, Nodes_count: 240, Cost Time: 1.41s\n",
      "Time: 2019-05-11 15:29:22.727000000~2019-05-11 16:12:02.210000000, Loss: 0.5257, Nodes_count: 240, Cost Time: 1.47s\n",
      "Time: 2019-05-11 16:12:02.210000000~2019-05-11 16:55:01.313000000, Loss: 0.4945, Nodes_count: 240, Cost Time: 1.53s\n",
      "Time: 2019-05-11 16:55:01.313000000~2019-05-11 17:37:22.913000000, Loss: 0.4953, Nodes_count: 240, Cost Time: 1.58s\n",
      "Time: 2019-05-11 17:37:22.913000000~2019-05-11 18:20:06.326000000, Loss: 0.5040, Nodes_count: 240, Cost Time: 1.64s\n",
      "Time: 2019-05-11 18:20:06.326000000~2019-05-11 19:03:05.351000000, Loss: 0.4911, Nodes_count: 240, Cost Time: 1.70s\n",
      "Time: 2019-05-11 19:03:05.351000000~2019-05-11 19:45:23.083000000, Loss: 0.5085, Nodes_count: 240, Cost Time: 1.75s\n",
      "Time: 2019-05-11 19:45:23.083000000~2019-05-11 20:28:10.828000000, Loss: 0.5020, Nodes_count: 240, Cost Time: 1.81s\n",
      "Time: 2019-05-11 20:28:10.828000000~2019-05-11 21:11:08.839000000, Loss: 0.4838, Nodes_count: 240, Cost Time: 1.88s\n",
      "Time: 2019-05-11 21:11:08.839000000~2019-05-11 21:53:23.245000000, Loss: 0.4973, Nodes_count: 240, Cost Time: 1.93s\n",
      "Time: 2019-05-11 21:53:23.245000000~2019-05-11 22:36:14.939000000, Loss: 0.5102, Nodes_count: 240, Cost Time: 2.00s\n",
      "Time: 2019-05-11 22:36:14.939000000~2019-05-11 23:19:12.771000000, Loss: 0.4887, Nodes_count: 240, Cost Time: 2.06s\n",
      "Time: 2019-05-11 23:19:12.771000000~2019-05-11 23:59:39.175000000, Loss: 0.5085, Nodes_count: 240, Cost Time: 2.13s\n"
     ]
    }
   ],
   "source": [
    "ans_5_12=test_day_new(graph_5_12,\"graph_5_12\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[9518075], msg=[9518075, 42], src=[9518075], t=[9518075])\n",
      "Time: 2019-05-14 00:00:00.110000000~2019-05-14 00:16:55.164000000, Loss: 0.5621, Nodes_count: 831, Cost Time: 19.18s\n",
      "Time: 2019-05-14 00:16:55.164000000~2019-05-14 00:31:56.512000000, Loss: 0.6636, Nodes_count: 902, Cost Time: 24.05s\n",
      "Time: 2019-05-14 00:31:56.512000000~2019-05-14 00:46:57.832000000, Loss: 0.4942, Nodes_count: 963, Cost Time: 47.27s\n",
      "Time: 2019-05-14 00:46:57.832000000~2019-05-14 01:01:59.206000000, Loss: 0.6481, Nodes_count: 1019, Cost Time: 51.91s\n",
      "Time: 2019-05-14 01:01:59.206000000~2019-05-14 01:17:00.513000000, Loss: 0.4883, Nodes_count: 1089, Cost Time: 75.51s\n",
      "Time: 2019-05-14 01:17:00.513000000~2019-05-14 01:32:01.927000000, Loss: 0.5433, Nodes_count: 1151, Cost Time: 80.95s\n",
      "Time: 2019-05-14 01:32:01.927000000~2019-05-14 01:47:02.953000000, Loss: 0.4940, Nodes_count: 1218, Cost Time: 103.32s\n",
      "Time: 2019-05-14 01:47:02.953000000~2019-05-14 02:02:04.367000000, Loss: 0.5369, Nodes_count: 1278, Cost Time: 108.08s\n",
      "Time: 2019-05-14 02:02:04.367000000~2019-05-14 02:17:05.742000000, Loss: 0.4853, Nodes_count: 1347, Cost Time: 131.60s\n",
      "Time: 2019-05-14 02:17:05.742000000~2019-05-14 02:32:06.894000000, Loss: 0.4960, Nodes_count: 1409, Cost Time: 137.10s\n",
      "Time: 2019-05-14 02:32:06.894000000~2019-05-14 02:47:08.231000000, Loss: 0.4788, Nodes_count: 1475, Cost Time: 161.08s\n",
      "Time: 2019-05-14 02:47:08.231000000~2019-05-14 03:02:09.643000000, Loss: 0.4914, Nodes_count: 1541, Cost Time: 165.94s\n",
      "Time: 2019-05-14 03:02:09.643000000~2019-05-14 03:17:10.873000000, Loss: 0.4971, Nodes_count: 1599, Cost Time: 184.99s\n",
      "Time: 2019-05-14 03:17:10.873000000~2019-05-14 03:32:12.207000000, Loss: 0.4963, Nodes_count: 1663, Cost Time: 190.25s\n",
      "Time: 2019-05-14 03:32:12.207000000~2019-05-14 03:47:13.548000000, Loss: 0.5028, Nodes_count: 1731, Cost Time: 211.04s\n",
      "Time: 2019-05-14 03:47:13.548000000~2019-05-14 04:02:14.881000000, Loss: 0.6564, Nodes_count: 1789, Cost Time: 215.91s\n",
      "Time: 2019-05-14 04:02:14.881000000~2019-05-14 04:17:16.184000000, Loss: 0.4920, Nodes_count: 1859, Cost Time: 232.21s\n",
      "Time: 2019-05-14 04:17:16.184000000~2019-05-14 04:32:17.569000000, Loss: 0.5089, Nodes_count: 1926, Cost Time: 237.49s\n",
      "Time: 2019-05-14 04:32:17.569000000~2019-05-14 04:47:18.740000000, Loss: 0.4882, Nodes_count: 1989, Cost Time: 253.92s\n",
      "Time: 2019-05-14 04:47:18.740000000~2019-05-14 05:02:20.031000000, Loss: 0.6471, Nodes_count: 2044, Cost Time: 258.65s\n",
      "Time: 2019-05-14 05:02:20.031000000~2019-05-14 05:17:21.435000000, Loss: 0.5208, Nodes_count: 2110, Cost Time: 275.07s\n",
      "Time: 2019-05-14 05:17:21.435000000~2019-05-14 05:32:22.709000000, Loss: 0.5338, Nodes_count: 2185, Cost Time: 279.63s\n",
      "Time: 2019-05-14 05:32:22.709000000~2019-05-14 05:47:23.937000000, Loss: 0.5299, Nodes_count: 2249, Cost Time: 295.98s\n",
      "Time: 2019-05-14 05:47:23.937000000~2019-05-14 06:02:25.486000000, Loss: 0.5967, Nodes_count: 2314, Cost Time: 301.69s\n",
      "Time: 2019-05-14 06:02:25.486000000~2019-05-14 06:17:26.839000000, Loss: 0.4955, Nodes_count: 2383, Cost Time: 319.84s\n",
      "Time: 2019-05-14 06:17:26.839000000~2019-05-14 06:32:28.279000000, Loss: 0.5317, Nodes_count: 2455, Cost Time: 324.42s\n",
      "Time: 2019-05-14 06:32:28.279000000~2019-05-14 06:47:29.637000000, Loss: 0.4931, Nodes_count: 2521, Cost Time: 345.96s\n",
      "Time: 2019-05-14 06:47:29.637000000~2019-05-14 07:02:31.171000000, Loss: 0.5535, Nodes_count: 2589, Cost Time: 351.46s\n",
      "Time: 2019-05-14 07:02:31.171000000~2019-05-14 07:17:32.480000000, Loss: 0.4893, Nodes_count: 2646, Cost Time: 373.67s\n",
      "Time: 2019-05-14 07:17:32.480000000~2019-05-14 07:32:33.922000000, Loss: 0.5118, Nodes_count: 2715, Cost Time: 378.48s\n",
      "Time: 2019-05-14 07:32:33.922000000~2019-05-14 07:47:34.625000000, Loss: 0.4868, Nodes_count: 2774, Cost Time: 402.42s\n",
      "Time: 2019-05-14 07:47:34.625000000~2019-05-14 08:02:36.238000000, Loss: 0.5117, Nodes_count: 2840, Cost Time: 407.79s\n",
      "Time: 2019-05-14 08:02:36.238000000~2019-05-14 08:17:37.204000000, Loss: 0.4869, Nodes_count: 2909, Cost Time: 433.28s\n",
      "Time: 2019-05-14 08:17:37.204000000~2019-05-14 08:32:38.759000000, Loss: 0.5351, Nodes_count: 2970, Cost Time: 438.68s\n",
      "Time: 2019-05-14 08:32:38.759000000~2019-05-14 08:47:39.890000000, Loss: 0.4938, Nodes_count: 3029, Cost Time: 464.82s\n",
      "Time: 2019-05-14 08:47:39.890000000~2019-05-14 09:02:41.157000000, Loss: 0.4995, Nodes_count: 3094, Cost Time: 469.74s\n",
      "Time: 2019-05-14 09:02:41.157000000~2019-05-14 09:17:42.517000000, Loss: 0.6637, Nodes_count: 4233, Cost Time: 532.47s\n",
      "Time: 2019-05-14 09:17:42.517000000~2019-05-14 09:32:44.093000000, Loss: 0.6742, Nodes_count: 4906, Cost Time: 574.13s\n",
      "Time: 2019-05-14 09:32:44.093000000~2019-05-14 09:47:44.971000000, Loss: 0.6269, Nodes_count: 5335, Cost Time: 617.73s\n",
      "Time: 2019-05-14 09:47:44.971000000~2019-05-14 10:02:46.433000000, Loss: 0.6768, Nodes_count: 6152, Cost Time: 667.42s\n",
      "Time: 2019-05-14 10:02:46.433000000~2019-05-14 10:17:47.485000000, Loss: 0.6626, Nodes_count: 6957, Cost Time: 743.67s\n",
      "Time: 2019-05-14 10:17:47.485000000~2019-05-14 10:32:48.904000000, Loss: 0.6543, Nodes_count: 7856, Cost Time: 769.55s\n",
      "Time: 2019-05-14 10:32:48.904000000~2019-05-14 10:47:49.773000000, Loss: 0.6387, Nodes_count: 8321, Cost Time: 817.59s\n",
      "Time: 2019-05-14 10:47:49.773000000~2019-05-14 11:02:51.213000000, Loss: 0.6739, Nodes_count: 9057, Cost Time: 877.00s\n",
      "Time: 2019-05-14 11:02:51.213000000~2019-05-14 11:17:52.335000000, Loss: 0.6206, Nodes_count: 9386, Cost Time: 923.28s\n",
      "Time: 2019-05-14 11:17:52.335000000~2019-05-14 11:32:53.549000000, Loss: 0.6705, Nodes_count: 9828, Cost Time: 956.99s\n",
      "Time: 2019-05-14 11:32:53.549000000~2019-05-14 11:47:54.952000000, Loss: 0.5769, Nodes_count: 10080, Cost Time: 1002.25s\n",
      "Time: 2019-05-14 11:47:54.952000000~2019-05-14 12:02:56.496000000, Loss: 0.6894, Nodes_count: 10881, Cost Time: 1055.95s\n",
      "Time: 2019-05-14 12:02:56.496000000~2019-05-14 12:17:57.806000000, Loss: 0.6625, Nodes_count: 11443, Cost Time: 1112.07s\n",
      "Time: 2019-05-14 12:17:57.806000000~2019-05-14 12:32:59.282000000, Loss: 0.6751, Nodes_count: 11759, Cost Time: 1137.82s\n",
      "Time: 2019-05-14 12:32:59.282000000~2019-05-14 12:48:00.531000000, Loss: 0.6087, Nodes_count: 11919, Cost Time: 1159.43s\n",
      "Time: 2019-05-14 12:48:00.531000000~2019-05-14 13:03:02.064000000, Loss: 0.6705, Nodes_count: 12147, Cost Time: 1170.82s\n",
      "Time: 2019-05-14 13:03:02.064000000~2019-05-14 13:18:02.860000000, Loss: 0.6479, Nodes_count: 12533, Cost Time: 1224.38s\n",
      "Time: 2019-05-14 13:18:02.860000000~2019-05-14 13:33:03.664000000, Loss: 0.6767, Nodes_count: 13354, Cost Time: 1290.08s\n",
      "Time: 2019-05-14 13:33:03.664000000~2019-05-14 13:48:04.514000000, Loss: 0.6706, Nodes_count: 17402, Cost Time: 1360.51s\n",
      "Time: 2019-05-14 13:48:04.514000000~2019-05-14 14:03:07.325000000, Loss: 0.6790, Nodes_count: 18088, Cost Time: 1412.28s\n",
      "Time: 2019-05-14 14:03:07.325000000~2019-05-14 14:18:08.816000000, Loss: 0.6469, Nodes_count: 18373, Cost Time: 1447.57s\n",
      "Time: 2019-05-14 14:18:08.816000000~2019-05-14 14:33:09.949000000, Loss: 0.6767, Nodes_count: 18776, Cost Time: 1481.00s\n",
      "Time: 2019-05-14 14:33:09.949000000~2019-05-14 14:48:12.419000000, Loss: 0.6687, Nodes_count: 19539, Cost Time: 1553.85s\n",
      "Time: 2019-05-14 14:48:12.419000000~2019-05-14 15:04:01.280000000, Loss: 0.6598, Nodes_count: 20598, Cost Time: 1638.01s\n",
      "Time: 2019-05-14 15:04:01.280000000~2019-05-14 15:20:00.233000000, Loss: 0.6477, Nodes_count: 21175, Cost Time: 1710.06s\n",
      "Time: 2019-05-14 15:20:00.233000000~2019-05-14 15:35:02.947000000, Loss: 0.6600, Nodes_count: 21331, Cost Time: 1723.94s\n",
      "Time: 2019-05-14 15:35:02.947000000~2019-05-14 15:50:03.958000000, Loss: 0.5768, Nodes_count: 21506, Cost Time: 1755.07s\n",
      "Time: 2019-05-14 15:50:03.958000000~2019-05-14 16:05:05.450000000, Loss: 0.6898, Nodes_count: 21829, Cost Time: 1780.74s\n",
      "Time: 2019-05-14 16:05:05.450000000~2019-05-14 16:20:06.873000000, Loss: 0.6386, Nodes_count: 22372, Cost Time: 1850.26s\n",
      "Time: 2019-05-14 16:20:06.873000000~2019-05-14 16:35:08.328000000, Loss: 0.6848, Nodes_count: 23023, Cost Time: 1902.45s\n",
      "Time: 2019-05-14 16:35:08.328000000~2019-05-14 16:50:09.477000000, Loss: 0.6423, Nodes_count: 23318, Cost Time: 1954.39s\n",
      "Time: 2019-05-14 16:50:09.477000000~2019-05-14 17:05:10.964000000, Loss: 0.6755, Nodes_count: 24246, Cost Time: 2030.82s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Time: 2019-05-14 17:05:10.964000000~2019-05-14 17:20:12.227000000, Loss: 0.6217, Nodes_count: 24522, Cost Time: 2080.32s\n",
      "Time: 2019-05-14 17:20:12.227000000~2019-05-14 17:35:13.719000000, Loss: 0.6864, Nodes_count: 24651, Cost Time: 2087.85s\n",
      "Time: 2019-05-14 17:35:13.719000000~2019-05-14 17:50:26.379000000, Loss: 0.6389, Nodes_count: 25303, Cost Time: 2152.99s\n",
      "Time: 2019-05-14 17:50:26.379000000~2019-05-14 18:05:27.471000000, Loss: 0.6888, Nodes_count: 25776, Cost Time: 2185.38s\n",
      "Time: 2019-05-14 18:05:27.471000000~2019-05-14 18:20:29.235000000, Loss: 0.6603, Nodes_count: 27036, Cost Time: 2260.20s\n",
      "Time: 2019-05-14 18:20:29.235000000~2019-05-14 18:35:31.058000000, Loss: 0.6790, Nodes_count: 27745, Cost Time: 2318.42s\n",
      "Time: 2019-05-14 18:35:31.058000000~2019-05-14 18:50:32.147000000, Loss: 0.6484, Nodes_count: 28201, Cost Time: 2365.31s\n",
      "Time: 2019-05-14 18:50:32.147000000~2019-05-14 19:05:39.105000000, Loss: 0.6455, Nodes_count: 28709, Cost Time: 2399.31s\n",
      "Time: 2019-05-14 19:05:39.105000000~2019-05-14 19:22:38.029000000, Loss: 0.6423, Nodes_count: 28781, Cost Time: 2405.60s\n",
      "Time: 2019-05-14 19:22:38.029000000~2019-05-14 19:41:01.975000000, Loss: 0.8315, Nodes_count: 28797, Cost Time: 2406.23s\n",
      "Time: 2019-05-14 19:41:01.975000000~2019-05-14 19:59:18.996000000, Loss: 0.5166, Nodes_count: 28818, Cost Time: 2407.34s\n",
      "Time: 2019-05-14 19:59:18.996000000~2019-05-14 21:19:21.805000000, Loss: 0.6737, Nodes_count: 28878, Cost Time: 2407.42s\n",
      "Time: 2019-05-14 21:19:21.805000000~2019-05-14 22:39:40.614000000, Loss: 0.6892, Nodes_count: 28938, Cost Time: 2407.49s\n",
      "Time: 2019-05-14 22:39:40.614000000~2019-05-14 23:59:43.171000000, Loss: 0.7108, Nodes_count: 28992, Cost Time: 2407.56s\n"
     ]
    }
   ],
   "source": [
    "graph_5_14.to(device=device)\n",
    "ans_5_14=test_day_new(graph_5_14,\"graph_5_14\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[8379621], msg=[8379621, 42], src=[8379621], t=[8379621])\n",
      "Time: 2019-05-15 00:00:01.169000000~2019-05-15 01:20:28.968000000, Loss: 2.0546, Nodes_count: 64, Cost Time: 0.05s\n",
      "Time: 2019-05-15 01:20:28.968000000~2019-05-15 02:41:31.305000000, Loss: 1.2460, Nodes_count: 115, Cost Time: 0.11s\n",
      "Time: 2019-05-15 02:41:31.305000000~2019-05-15 04:02:33.803000000, Loss: 0.8808, Nodes_count: 167, Cost Time: 0.17s\n",
      "Time: 2019-05-15 04:02:33.803000000~2019-05-15 05:22:53.612000000, Loss: 1.3112, Nodes_count: 228, Cost Time: 0.25s\n",
      "Time: 2019-05-15 05:22:53.612000000~2019-05-15 06:42:56.141000000, Loss: 1.5039, Nodes_count: 293, Cost Time: 0.31s\n",
      "Time: 2019-05-15 06:42:56.141000000~2019-05-15 08:02:58.920000000, Loss: 1.0764, Nodes_count: 355, Cost Time: 0.38s\n",
      "Time: 2019-05-15 08:02:58.920000000~2019-05-15 09:24:01.092000000, Loss: 1.3291, Nodes_count: 406, Cost Time: 0.44s\n",
      "Time: 2019-05-15 09:24:01.092000000~2019-05-15 10:46:03.642000000, Loss: 1.0905, Nodes_count: 446, Cost Time: 0.51s\n",
      "Time: 2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000, Loss: 1.1718, Nodes_count: 605, Cost Time: 0.71s\n",
      "Time: 2019-05-15 11:06:20.806000000~2019-05-15 11:30:40.055000000, Loss: 1.0841, Nodes_count: 613, Cost Time: 0.90s\n",
      "Time: 2019-05-15 11:30:40.055000000~2019-05-15 12:37:07.284000000, Loss: 3.3509, Nodes_count: 614, Cost Time: 0.96s\n",
      "Time: 2019-05-15 12:37:07.284000000~2019-05-15 12:52:16.045000000, Loss: 1.2146, Nodes_count: 2373, Cost Time: 1.74s\n",
      "Time: 2019-05-15 12:52:16.045000000~2019-05-15 13:07:55.465000000, Loss: 0.7422, Nodes_count: 5577, Cost Time: 20.33s\n",
      "Time: 2019-05-15 13:07:55.465000000~2019-05-15 13:22:56.671000000, Loss: 0.4777, Nodes_count: 5644, Cost Time: 33.96s\n",
      "Time: 2019-05-15 13:22:56.671000000~2019-05-15 13:37:57.665000000, Loss: 0.4978, Nodes_count: 5690, Cost Time: 39.08s\n",
      "Time: 2019-05-15 13:37:57.665000000~2019-05-15 13:52:58.745000000, Loss: 0.5007, Nodes_count: 5752, Cost Time: 51.51s\n",
      "Time: 2019-05-15 13:52:58.745000000~2019-05-15 14:07:59.753000000, Loss: 0.4933, Nodes_count: 5787, Cost Time: 56.41s\n",
      "Time: 2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000, Loss: 0.6595, Nodes_count: 6256, Cost Time: 81.26s\n",
      "Time: 2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000, Loss: 0.5534, Nodes_count: 6323, Cost Time: 86.14s\n",
      "Time: 2019-05-15 14:38:02.135000000~2019-05-15 14:53:31.210000000, Loss: 0.6690, Nodes_count: 7220, Cost Time: 140.26s\n",
      "Time: 2019-05-15 14:53:31.210000000~2019-05-15 15:08:41.249000000, Loss: 0.6780, Nodes_count: 8248, Cost Time: 170.36s\n",
      "Time: 2019-05-15 15:08:41.249000000~2019-05-15 15:23:58.047000000, Loss: 0.6464, Nodes_count: 8686, Cost Time: 206.67s\n",
      "Time: 2019-05-15 15:23:58.047000000~2019-05-15 15:38:59.175000000, Loss: 0.6627, Nodes_count: 8978, Cost Time: 229.91s\n",
      "Time: 2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000, Loss: 0.6576, Nodes_count: 9366, Cost Time: 261.67s\n",
      "Time: 2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000, Loss: 0.5148, Nodes_count: 9447, Cost Time: 266.98s\n",
      "Time: 2019-05-15 16:11:27.687000000~2019-05-15 16:26:39.781000000, Loss: 0.6340, Nodes_count: 10447, Cost Time: 294.33s\n",
      "Time: 2019-05-15 16:26:39.781000000~2019-05-15 16:41:48.642000000, Loss: 0.6751, Nodes_count: 11393, Cost Time: 366.94s\n",
      "Time: 2019-05-15 16:41:48.642000000~2019-05-15 16:56:48.656000000, Loss: 0.6881, Nodes_count: 12123, Cost Time: 419.42s\n",
      "Time: 2019-05-15 16:56:48.656000000~2019-05-15 17:11:55.171000000, Loss: 0.6791, Nodes_count: 12508, Cost Time: 438.57s\n",
      "Time: 2019-05-15 17:11:55.171000000~2019-05-15 17:27:58.721000000, Loss: 0.6455, Nodes_count: 13515, Cost Time: 470.67s\n",
      "Time: 2019-05-15 17:27:58.721000000~2019-05-15 17:43:17.531000000, Loss: 0.6758, Nodes_count: 14146, Cost Time: 509.35s\n",
      "Time: 2019-05-15 17:43:17.531000000~2019-05-15 17:58:19.189000000, Loss: 0.6359, Nodes_count: 15054, Cost Time: 553.70s\n",
      "Time: 2019-05-15 17:58:19.189000000~2019-05-15 18:14:08.311000000, Loss: 0.6750, Nodes_count: 16676, Cost Time: 611.81s\n",
      "Time: 2019-05-15 18:14:08.311000000~2019-05-15 18:29:15.893000000, Loss: 0.6592, Nodes_count: 17295, Cost Time: 669.59s\n",
      "Time: 2019-05-15 18:29:15.893000000~2019-05-15 18:44:17.260000000, Loss: 0.6834, Nodes_count: 18121, Cost Time: 726.57s\n",
      "Time: 2019-05-15 18:44:17.260000000~2019-05-15 18:59:18.387000000, Loss: 0.6381, Nodes_count: 18395, Cost Time: 762.70s\n",
      "Time: 2019-05-15 18:59:18.387000000~2019-05-15 19:14:19.745000000, Loss: 0.6656, Nodes_count: 19182, Cost Time: 821.77s\n",
      "Time: 2019-05-15 19:14:19.745000000~2019-05-15 19:29:20.800000000, Loss: 0.6645, Nodes_count: 19884, Cost Time: 896.80s\n",
      "Time: 2019-05-15 19:29:20.800000000~2019-05-15 19:44:22.319000000, Loss: 0.6708, Nodes_count: 20455, Cost Time: 945.76s\n",
      "Time: 2019-05-15 19:44:22.319000000~2019-05-15 19:59:22.601000000, Loss: 0.6451, Nodes_count: 20708, Cost Time: 977.94s\n",
      "Time: 2019-05-15 19:59:22.601000000~2019-05-15 20:14:24.590000000, Loss: 0.6766, Nodes_count: 21876, Cost Time: 1071.11s\n",
      "Time: 2019-05-15 20:14:24.590000000~2019-05-15 20:29:25.523000000, Loss: 0.6571, Nodes_count: 22337, Cost Time: 1127.44s\n",
      "Time: 2019-05-15 20:29:25.523000000~2019-05-15 20:44:26.676000000, Loss: 0.6721, Nodes_count: 22764, Cost Time: 1164.71s\n",
      "Time: 2019-05-15 20:44:26.676000000~2019-05-15 20:59:27.020000000, Loss: 0.6570, Nodes_count: 23279, Cost Time: 1222.79s\n",
      "Time: 2019-05-15 20:59:27.020000000~2019-05-15 21:14:28.862000000, Loss: 0.6656, Nodes_count: 23709, Cost Time: 1260.30s\n",
      "Time: 2019-05-15 21:14:28.862000000~2019-05-15 21:29:30.066000000, Loss: 0.6605, Nodes_count: 24297, Cost Time: 1328.43s\n",
      "Time: 2019-05-15 21:29:30.066000000~2019-05-15 21:44:31.381000000, Loss: 0.6729, Nodes_count: 24742, Cost Time: 1365.96s\n",
      "Time: 2019-05-15 21:44:31.381000000~2019-05-15 21:59:32.550000000, Loss: 0.6553, Nodes_count: 25232, Cost Time: 1424.23s\n",
      "Time: 2019-05-15 21:59:32.550000000~2019-05-15 22:14:33.792000000, Loss: 0.6745, Nodes_count: 25651, Cost Time: 1462.61s\n",
      "Time: 2019-05-15 22:14:33.792000000~2019-05-15 22:29:35.074000000, Loss: 0.6474, Nodes_count: 25999, Cost Time: 1507.37s\n",
      "Time: 2019-05-15 22:29:35.074000000~2019-05-15 22:44:36.431000000, Loss: 0.6945, Nodes_count: 26364, Cost Time: 1538.06s\n",
      "Time: 2019-05-15 22:44:36.431000000~2019-05-15 22:59:37.524000000, Loss: 0.6545, Nodes_count: 26718, Cost Time: 1585.01s\n",
      "Time: 2019-05-15 22:59:37.524000000~2019-05-15 23:14:38.671000000, Loss: 0.6692, Nodes_count: 27306, Cost Time: 1638.47s\n",
      "Time: 2019-05-15 23:14:38.671000000~2019-05-15 23:29:39.881000000, Loss: 0.6525, Nodes_count: 27583, Cost Time: 1678.61s\n",
      "Time: 2019-05-15 23:29:39.881000000~2019-05-15 23:44:41.019000000, Loss: 0.6794, Nodes_count: 28054, Cost Time: 1724.94s\n"
     ]
    }
   ],
   "source": [
    "ans_5_15=test_day_new(graph_5_15,\"graph_5_15\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "after merge: TemporalData(dst=[7520878], msg=[7520878, 42], src=[7520878], t=[7520878])\n",
      "Time: 2019-05-17 00:00:00.162000000~2019-05-17 00:17:05.023000000, Loss: 0.5421, Nodes_count: 801, Cost Time: 15.61s\n",
      "Time: 2019-05-17 00:17:05.023000000~2019-05-17 00:32:06.529000000, Loss: 0.5200, Nodes_count: 860, Cost Time: 20.06s\n",
      "Time: 2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000, Loss: 0.4834, Nodes_count: 955, Cost Time: 41.46s\n",
      "Time: 2019-05-17 00:47:07.880000000~2019-05-17 01:02:09.047000000, Loss: 0.4778, Nodes_count: 1003, Cost Time: 45.97s\n",
      "Time: 2019-05-17 01:02:09.047000000~2019-05-17 01:17:10.410000000, Loss: 0.4721, Nodes_count: 1059, Cost Time: 67.84s\n",
      "Time: 2019-05-17 01:17:10.410000000~2019-05-17 01:32:11.778000000, Loss: 0.5005, Nodes_count: 1111, Cost Time: 72.82s\n",
      "Time: 2019-05-17 01:32:11.778000000~2019-05-17 01:47:12.976000000, Loss: 0.4731, Nodes_count: 1167, Cost Time: 94.07s\n",
      "Time: 2019-05-17 01:47:12.976000000~2019-05-17 02:02:14.158000000, Loss: 0.4686, Nodes_count: 1215, Cost Time: 98.87s\n",
      "Time: 2019-05-17 02:02:14.158000000~2019-05-17 02:17:15.482000000, Loss: 0.4709, Nodes_count: 1270, Cost Time: 121.56s\n",
      "Time: 2019-05-17 02:17:15.482000000~2019-05-17 02:32:16.882000000, Loss: 0.4947, Nodes_count: 1328, Cost Time: 125.88s\n",
      "Time: 2019-05-17 02:32:16.882000000~2019-05-17 02:47:18.043000000, Loss: 0.4765, Nodes_count: 1379, Cost Time: 147.55s\n",
      "Time: 2019-05-17 02:47:18.043000000~2019-05-17 03:02:19.211000000, Loss: 0.4871, Nodes_count: 1440, Cost Time: 152.28s\n",
      "Time: 2019-05-17 03:02:19.211000000~2019-05-17 03:17:20.492000000, Loss: 0.4725, Nodes_count: 1487, Cost Time: 175.17s\n",
      "Time: 2019-05-17 03:17:20.492000000~2019-05-17 03:32:21.866000000, Loss: 0.4960, Nodes_count: 1537, Cost Time: 179.74s\n",
      "Time: 2019-05-17 03:32:21.866000000~2019-05-17 03:47:23.177000000, Loss: 0.4703, Nodes_count: 1582, Cost Time: 200.41s\n",
      "Time: 2019-05-17 03:47:23.177000000~2019-05-17 04:02:24.594000000, Loss: 0.4784, Nodes_count: 1641, Cost Time: 204.97s\n",
      "Time: 2019-05-17 04:02:24.594000000~2019-05-17 04:17:25.489000000, Loss: 0.4716, Nodes_count: 1688, Cost Time: 226.69s\n",
      "Time: 2019-05-17 04:17:25.489000000~2019-05-17 04:32:26.948000000, Loss: 0.4838, Nodes_count: 1738, Cost Time: 231.21s\n",
      "Time: 2019-05-17 04:32:26.948000000~2019-05-17 04:47:28.259000000, Loss: 0.4748, Nodes_count: 1788, Cost Time: 253.85s\n",
      "Time: 2019-05-17 04:47:28.259000000~2019-05-17 05:02:29.618000000, Loss: 0.5168, Nodes_count: 1848, Cost Time: 258.60s\n",
      "Time: 2019-05-17 05:02:29.618000000~2019-05-17 05:17:30.624000000, Loss: 0.4736, Nodes_count: 1897, Cost Time: 279.36s\n",
      "Time: 2019-05-17 05:17:30.624000000~2019-05-17 05:32:31.961000000, Loss: 0.4813, Nodes_count: 1949, Cost Time: 283.97s\n",
      "Time: 2019-05-17 05:32:31.961000000~2019-05-17 05:47:33.352000000, Loss: 0.4723, Nodes_count: 2012, Cost Time: 305.01s\n",
      "Time: 2019-05-17 05:47:33.352000000~2019-05-17 06:02:34.706000000, Loss: 0.4876, Nodes_count: 2054, Cost Time: 309.43s\n",
      "Time: 2019-05-17 06:02:34.706000000~2019-05-17 06:17:36.113000000, Loss: 0.4742, Nodes_count: 2099, Cost Time: 330.97s\n",
      "Time: 2019-05-17 06:17:36.113000000~2019-05-17 06:32:37.454000000, Loss: 0.4994, Nodes_count: 2147, Cost Time: 335.75s\n",
      "Time: 2019-05-17 06:32:37.454000000~2019-05-17 06:47:38.801000000, Loss: 0.4729, Nodes_count: 2198, Cost Time: 357.43s\n",
      "Time: 2019-05-17 06:47:38.801000000~2019-05-17 07:02:40.131000000, Loss: 0.4775, Nodes_count: 2244, Cost Time: 362.04s\n",
      "Time: 2019-05-17 07:02:40.131000000~2019-05-17 07:17:41.404000000, Loss: 0.4703, Nodes_count: 2309, Cost Time: 383.11s\n",
      "Time: 2019-05-17 07:17:41.404000000~2019-05-17 07:32:42.668000000, Loss: 0.4925, Nodes_count: 2365, Cost Time: 387.49s\n",
      "Time: 2019-05-17 07:32:42.668000000~2019-05-17 07:47:44.085000000, Loss: 0.4704, Nodes_count: 2415, Cost Time: 408.91s\n",
      "Time: 2019-05-17 07:47:44.085000000~2019-05-17 08:02:45.378000000, Loss: 0.4730, Nodes_count: 2461, Cost Time: 413.67s\n",
      "Time: 2019-05-17 08:02:45.378000000~2019-05-17 08:17:46.281000000, Loss: 0.4698, Nodes_count: 2512, Cost Time: 436.21s\n",
      "Time: 2019-05-17 08:17:46.281000000~2019-05-17 08:32:47.467000000, Loss: 0.5136, Nodes_count: 2560, Cost Time: 440.87s\n",
      "Time: 2019-05-17 08:32:47.467000000~2019-05-17 08:47:48.296000000, Loss: 0.4789, Nodes_count: 2605, Cost Time: 461.62s\n",
      "Time: 2019-05-17 08:47:48.296000000~2019-05-17 09:02:49.657000000, Loss: 0.6890, Nodes_count: 2926, Cost Time: 471.84s\n",
      "Time: 2019-05-17 09:02:49.657000000~2019-05-17 09:17:51.260000000, Loss: 0.6573, Nodes_count: 3559, Cost Time: 513.88s\n",
      "Time: 2019-05-17 09:17:51.260000000~2019-05-17 09:32:52.628000000, Loss: 0.6795, Nodes_count: 4617, Cost Time: 567.69s\n",
      "Time: 2019-05-17 09:32:52.628000000~2019-05-17 09:47:52.897000000, Loss: 0.6624, Nodes_count: 5569, Cost Time: 659.46s\n",
      "Time: 2019-05-17 09:47:52.897000000~2019-05-17 10:02:57.226000000, Loss: 0.6826, Nodes_count: 6138, Cost Time: 703.91s\n",
      "Time: 2019-05-17 10:02:57.226000000~2019-05-17 10:17:59.598000000, Loss: 0.6714, Nodes_count: 7139, Cost Time: 776.48s\n",
      "Time: 2019-05-17 10:17:59.598000000~2019-05-17 10:33:01.000000000, Loss: 0.6760, Nodes_count: 8383, Cost Time: 865.75s\n",
      "Time: 2019-05-17 10:33:01.000000000~2019-05-17 10:48:02.004000000, Loss: 0.6449, Nodes_count: 8836, Cost Time: 924.25s\n",
      "Time: 2019-05-17 10:48:02.004000000~2019-05-17 11:03:03.339000000, Loss: 0.6834, Nodes_count: 9283, Cost Time: 958.04s\n",
      "Time: 2019-05-17 11:03:03.339000000~2019-05-17 11:18:04.107000000, Loss: 0.6404, Nodes_count: 9707, Cost Time: 1017.65s\n",
      "Time: 2019-05-17 11:18:04.107000000~2019-05-17 11:33:05.441000000, Loss: 0.6772, Nodes_count: 10260, Cost Time: 1061.50s\n",
      "Time: 2019-05-17 11:33:05.441000000~2019-05-17 11:48:07.561000000, Loss: 0.6586, Nodes_count: 11060, Cost Time: 1142.15s\n",
      "Time: 2019-05-17 11:48:07.561000000~2019-05-17 12:03:08.991000000, Loss: 0.6794, Nodes_count: 11620, Cost Time: 1183.69s\n",
      "Time: 2019-05-17 12:03:08.991000000~2019-05-17 12:18:10.191000000, Loss: 0.6015, Nodes_count: 11781, Cost Time: 1219.96s\n",
      "Time: 2019-05-17 12:18:10.191000000~2019-05-17 12:33:11.434000000, Loss: 0.6657, Nodes_count: 11988, Cost Time: 1237.29s\n",
      "Time: 2019-05-17 12:33:11.434000000~2019-05-17 12:48:13.087000000, Loss: 0.6477, Nodes_count: 12547, Cost Time: 1300.46s\n",
      "Time: 2019-05-17 12:48:13.087000000~2019-05-17 13:03:13.328000000, Loss: 0.6769, Nodes_count: 12794, Cost Time: 1321.02s\n",
      "Time: 2019-05-17 13:03:13.328000000~2019-05-17 13:18:15.486000000, Loss: 0.6505, Nodes_count: 13420, Cost Time: 1400.82s\n",
      "Time: 2019-05-17 13:18:15.486000000~2019-05-17 13:33:16.997000000, Loss: 0.6750, Nodes_count: 13752, Cost Time: 1424.70s\n",
      "Time: 2019-05-17 13:33:16.997000000~2019-05-17 13:50:01.226000000, Loss: 0.6481, Nodes_count: 17455, Cost Time: 1493.21s\n",
      "Time: 2019-05-17 13:50:01.226000000~2019-05-17 14:05:21.553000000, Loss: 0.6674, Nodes_count: 18148, Cost Time: 1560.58s\n",
      "Time: 2019-05-17 14:05:21.553000000~2019-05-17 14:20:43.541000000, Loss: 0.6611, Nodes_count: 18680, Cost Time: 1618.31s\n",
      "Time: 2019-05-17 14:20:43.541000000~2019-05-17 14:35:45.993000000, Loss: 0.6782, Nodes_count: 19501, Cost Time: 1687.82s\n",
      "Time: 2019-05-17 14:35:45.993000000~2019-05-17 14:50:52.897000000, Loss: 0.6678, Nodes_count: 20147, Cost Time: 1743.34s\n",
      "Time: 2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000, Loss: 0.6960, Nodes_count: 20557, Cost Time: 1773.20s\n",
      "Time: 2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000, Loss: 0.6246, Nodes_count: 20751, Cost Time: 1796.30s\n",
      "Time: 2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000, Loss: 0.7209, Nodes_count: 20827, Cost Time: 1803.09s\n",
      "Time: 2019-05-17 15:36:41.964000000~2019-05-17 15:51:43.493000000, Loss: 0.5373, Nodes_count: 20883, Cost Time: 1817.03s\n",
      "Time: 2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000, Loss: 0.7666, Nodes_count: 20950, Cost Time: 1823.30s\n",
      "Time: 2019-05-17 16:06:44.953000000~2019-05-17 16:21:46.485000000, Loss: 0.5736, Nodes_count: 21005, Cost Time: 1838.72s\n",
      "Time: 2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000, Loss: 0.7083, Nodes_count: 21063, Cost Time: 1845.22s\n",
      "Time: 2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000, Loss: 0.5544, Nodes_count: 21129, Cost Time: 1859.42s\n",
      "Time: 2019-05-17 16:51:49.352000000~2019-05-17 17:06:50.973000000, Loss: 0.6453, Nodes_count: 21171, Cost Time: 1864.92s\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Time: 2019-05-17 17:06:50.973000000~2019-05-17 17:21:51.831000000, Loss: 0.5506, Nodes_count: 21215, Cost Time: 1878.68s\n",
      "Time: 2019-05-17 17:21:51.831000000~2019-05-17 17:36:53.217000000, Loss: 0.6196, Nodes_count: 21254, Cost Time: 1884.60s\n",
      "Time: 2019-05-17 17:36:53.217000000~2019-05-17 17:51:54.220000000, Loss: 0.5513, Nodes_count: 21298, Cost Time: 1898.22s\n",
      "Time: 2019-05-17 17:51:54.220000000~2019-05-17 18:06:55.854000000, Loss: 0.6136, Nodes_count: 21337, Cost Time: 1903.64s\n",
      "Time: 2019-05-17 18:06:55.854000000~2019-05-17 18:21:57.246000000, Loss: 0.5378, Nodes_count: 21379, Cost Time: 1918.97s\n",
      "Time: 2019-05-17 18:21:57.246000000~2019-05-17 18:36:58.618000000, Loss: 0.6231, Nodes_count: 21417, Cost Time: 1924.50s\n",
      "Time: 2019-05-17 18:36:58.618000000~2019-05-17 18:52:00.027000000, Loss: 0.5331, Nodes_count: 21469, Cost Time: 1939.00s\n",
      "Time: 2019-05-17 18:52:00.027000000~2019-05-17 19:07:01.518000000, Loss: 0.6418, Nodes_count: 21514, Cost Time: 1944.11s\n",
      "Time: 2019-05-17 19:07:01.518000000~2019-05-17 19:22:02.614000000, Loss: 0.5439, Nodes_count: 21559, Cost Time: 1959.19s\n",
      "Time: 2019-05-17 19:22:02.614000000~2019-05-17 19:37:03.821000000, Loss: 0.7388, Nodes_count: 21600, Cost Time: 1963.71s\n",
      "Time: 2019-05-17 19:37:03.821000000~2019-05-17 19:52:04.945000000, Loss: 0.5374, Nodes_count: 21646, Cost Time: 1979.27s\n",
      "Time: 2019-05-17 19:52:04.945000000~2019-05-17 20:07:06.424000000, Loss: 0.7484, Nodes_count: 21720, Cost Time: 1983.82s\n",
      "Time: 2019-05-17 20:07:06.424000000~2019-05-17 20:22:07.965000000, Loss: 0.5459, Nodes_count: 21764, Cost Time: 2000.69s\n",
      "Time: 2019-05-17 20:22:07.965000000~2019-05-17 20:37:09.206000000, Loss: 0.7264, Nodes_count: 21805, Cost Time: 2005.52s\n",
      "Time: 2019-05-17 20:37:09.206000000~2019-05-17 20:52:10.643000000, Loss: 0.5342, Nodes_count: 21845, Cost Time: 2022.45s\n",
      "Time: 2019-05-17 20:52:10.643000000~2019-05-17 21:07:12.192000000, Loss: 0.6534, Nodes_count: 21882, Cost Time: 2027.19s\n",
      "Time: 2019-05-17 21:07:12.192000000~2019-05-17 21:22:13.697000000, Loss: 0.5405, Nodes_count: 21930, Cost Time: 2044.52s\n",
      "Time: 2019-05-17 21:22:13.697000000~2019-05-17 21:37:14.934000000, Loss: 0.6149, Nodes_count: 21977, Cost Time: 2049.28s\n",
      "Time: 2019-05-17 21:37:14.934000000~2019-05-17 21:52:16.493000000, Loss: 0.5298, Nodes_count: 22029, Cost Time: 2066.49s\n",
      "Time: 2019-05-17 21:52:16.493000000~2019-05-17 22:07:18.054000000, Loss: 0.6168, Nodes_count: 22070, Cost Time: 2071.39s\n",
      "Time: 2019-05-17 22:07:18.054000000~2019-05-17 22:22:19.501000000, Loss: 0.5287, Nodes_count: 22110, Cost Time: 2087.95s\n",
      "Time: 2019-05-17 22:22:19.501000000~2019-05-17 22:37:20.816000000, Loss: 0.6281, Nodes_count: 22159, Cost Time: 2092.61s\n",
      "Time: 2019-05-17 22:37:20.816000000~2019-05-17 22:52:22.392000000, Loss: 0.5299, Nodes_count: 22196, Cost Time: 2109.72s\n",
      "Time: 2019-05-17 22:52:22.392000000~2019-05-17 23:07:23.887000000, Loss: 0.6206, Nodes_count: 22237, Cost Time: 2114.79s\n",
      "Time: 2019-05-17 23:07:23.887000000~2019-05-17 23:22:25.304000000, Loss: 0.5267, Nodes_count: 22293, Cost Time: 2131.48s\n",
      "Time: 2019-05-17 23:22:25.304000000~2019-05-17 23:37:26.760000000, Loss: 0.6184, Nodes_count: 22341, Cost Time: 2136.05s\n",
      "Time: 2019-05-17 23:37:26.760000000~2019-05-17 23:52:28.261000000, Loss: 0.5240, Nodes_count: 22375, Cost Time: 2153.54s\n"
     ]
    }
   ],
   "source": [
    "ans_5_17=test_day_new(graph_5_17,\"graph_5_17\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Initialize the node IDF"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|████████████████████████████████████████████████████████████████████████████████████████| 153/153 [02:32<00:00,  1.00it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "IDF weight calculate complete!\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "# share_node_IDF = mp.Manager().dict()\n",
    "\n",
    "node_set=set()\n",
    "\n",
    "file_list=[]\n",
    "\n",
    "\n",
    "file_path=\"graph_5_9/\"\n",
    "file_l=os.listdir(\"graph_5_9/\")\n",
    "for i in file_l:\n",
    "    file_list.append(file_path+i)\n",
    "\n",
    "file_path=\"graph_5_11/\"\n",
    "file_l=os.listdir(\"graph_5_11/\")\n",
    "for i in file_l:\n",
    "    file_list.append(file_path+i)\n",
    "\n",
    "\n",
    "file_path=\"graph_5_12/\"\n",
    "file_l=os.listdir(\"graph_5_12/\")\n",
    "for i in file_l:\n",
    "    file_list.append(file_path+i)\n",
    "\n",
    "node_IDF={}\n",
    "node_set = {}\n",
    "for f_path in tqdm(file_list):\n",
    "    f=open(f_path)\n",
    "    for line in f:\n",
    "        l=line.strip()\n",
    "        jdata=eval(l)\n",
    "        jdata=eval(l)\n",
    "        if jdata['loss']>0:\n",
    "            if 'netflow' not in str(jdata['srcmsg']):\n",
    "                if str(jdata['srcmsg']) not in node_set.keys():\n",
    "                    node_set[str(jdata['srcmsg'])] = set([f_path])\n",
    "                else:\n",
    "                    node_set[str(jdata['srcmsg'])].add(f_path)\n",
    "            if 'netflow' not in str(jdata['dstmsg']):\n",
    "                if str(jdata['dstmsg']) not in node_set.keys():\n",
    "                    node_set[str(jdata['dstmsg'])] = set([f_path])\n",
    "                else:\n",
    "                    node_set[str(jdata['dstmsg'])].add(f_path)\n",
    "for n in node_set:\n",
    "    include_count = len(node_set[n])   \n",
    "    IDF=math.log(len(file_list)/(include_count+1))\n",
    "    node_IDF[n] = IDF    \n",
    "\n",
    "\n",
    "torch.save(node_IDF,\"node_IDF_5_9_12\")\n",
    "print(\"IDF weight calculate complete!\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "24376"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(node_IDF)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [],
   "source": [
    "file_list_5_9_12 = file_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "def cal_train_IDF(find_str,file_list):\n",
    "    include_count=0\n",
    "    for f_path in (file_list):\n",
    "        f=open(f_path)\n",
    "        if find_str in f.read():\n",
    "            include_count+=1             \n",
    "    IDF=math.log(len(file_list)/(include_count+1))\n",
    "    return IDF\n",
    "\n",
    "\n",
    "def cal_IDF(find_str,file_path,file_list):\n",
    "    file_list=os.listdir(file_path)\n",
    "    include_count=0\n",
    "    different_neighbor=set()\n",
    "    for f_path in (file_list):\n",
    "        f=open(file_path+f_path)\n",
    "        if find_str in f.read():\n",
    "            include_count+=1                \n",
    "                \n",
    "    IDF=math.log(len(file_list)/(include_count+1))\n",
    "    \n",
    "    return IDF,1\n",
    "\n",
    "def cal_redundant(find_str,edge_list):\n",
    "    \n",
    "    different_neighbor=set()\n",
    "    for e in edge_list:\n",
    "        if find_str in str(e):\n",
    "            different_neighbor.add(e[0])\n",
    "            different_neighbor.add(e[1])\n",
    "    return len(different_neighbor)-2\n",
    "\n",
    "def cal_anomaly_loss(loss_list,edge_list,file_path):\n",
    "    \n",
    "    if len(loss_list)!=len(edge_list):\n",
    "        print(\"error!\")\n",
    "        return 0\n",
    "    count=0\n",
    "    loss_sum=0\n",
    "    loss_std=std(loss_list)\n",
    "    loss_mean=mean(loss_list)\n",
    "    edge_set=set()\n",
    "    node_set=set()\n",
    "    node2redundant={}\n",
    "    \n",
    "    thr=loss_mean+1.5*loss_std\n",
    "\n",
    "    print(\"thr:\",thr)\n",
    "\n",
    "    for i in range(len(loss_list)):\n",
    "        if loss_list[i]>thr:\n",
    "            count+=1\n",
    "            src_node=edge_list[i][0]\n",
    "            dst_node=edge_list[i][1]\n",
    "            \n",
    "            loss_sum+=loss_list[i]\n",
    "    \n",
    "            node_set.add(src_node)\n",
    "            node_set.add(dst_node)\n",
    "            edge_set.add(edge_list[i][0]+edge_list[i][1])\n",
    "    return count, loss_sum/(count + 0.000000001) ,node_set,edge_set\n",
    "#     return count, count/len(loss_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Construct the relations between time windows"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# node_IDF=torch.load(\"node_IDF\")\n",
    "\n",
    "\n",
    "def cal_set_rel_bak(s1,s2,file_list):\n",
    "    \"\"\"Count rare (high-IDF) nodes shared between two time-window node sets.\n",
    "\n",
    "    s1, s2: sets of node identifier strings from two time windows.\n",
    "    file_list: list of time-window files; its length is the IDF corpus size.\n",
    "    Returns the number of shared nodes whose IDF exceeds\n",
    "    log(len(file_list) * 0.9); relies on the module-level dict node_IDF\n",
    "    (loaded elsewhere in the notebook -- see the commented-out torch.load).\n",
    "    \"\"\"\n",
    "    new_s=s1 & s2\n",
    "    count=0\n",
    "    for i in new_s:\n",
    "#     jdata=json.loads(i)\n",
    "        # Skip known benign system paths, app package names and netflow nodes\n",
    "        # so only genuinely unusual shared nodes contribute to the count.\n",
    "        if 'netflow' not in i and '/data/system/' not in i \\\n",
    "            and '/storage/emulated/' not in i \\\n",
    "            and  '/data/data/com.android' not in i \\\n",
    "            and  '/proc/' not in i \\\n",
    "            and '/sys/devices/' not in i \\\n",
    "            and 'org.mozilla.fennec_vagrant' not in i \\\n",
    "            and 'mark.via.gp' not in i \\\n",
    "            and '/data/system_ce/' not in i \\\n",
    "            and '/Camera' not in i \\\n",
    "            and 'kohimovie.info.kohimovies' not in i \\\n",
    "            and '.dziauz.tinyflashlight' not in i \\\n",
    "            and 'com.' not in i \\\n",
    "            and 'android.process.media' not in i \\\n",
    "            and 'temp-index' not in i \\\n",
    "            and '/dev/binder' not in i \\\n",
    "            and 'vanilla' not in i:                       \n",
    "        \n",
    "        #             and 'screencap' not in i \\\n",
    "        \n",
    "#         and '.dziauz.tinyflashlight' not in i \\\n",
    "#             and '/data/system_ce/ not in i \\\n",
    "            \n",
    "#         and 'usr' not in i and 'proc' not in i and '675' not in i and 'firefox' not in i and 'tmp' not in i and 'thunderbird' not in i\n",
    "#         'netflow' not in i\n",
    "#         and 'usr' not in i and 'var' not in i\n",
    "            # Nodes never seen during IDF computation get the maximum possible\n",
    "            # IDF value, log(N / 1).\n",
    "            if i in node_IDF.keys():\n",
    "                IDF=node_IDF[i]\n",
    "            else:\n",
    "                IDF=math.log(len(file_list)/(1))           \n",
    "                   \n",
    "#             print(IDF)\n",
    "#             print(len(file_list))\n",
    "            # A node counts as rare if it appears in fewer than ~10% of windows.\n",
    "            if IDF>math.log(len(file_list)*0.9/(1))  :\n",
    "#             if IDF>4.7:\n",
    "                print(\"node:\",i,\" IDF:\",IDF)\n",
    "                count+=1\n",
    "    return count"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Label generation (ground truth per time window)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Initialize ground-truth and predicted labels to benign (0) for every\n",
    "# time-window graph file across the three evaluated days.\n",
    "labels={}\n",
    "pred_label={}\n",
    "\n",
    "# The original three per-day blocks were identical copy-paste; one loop over\n",
    "# the day directories removes the duplication and produces the same dicts.\n",
    "for graph_dir in [\"graph_5_14\", \"graph_5_15\", \"graph_5_17\"]:\n",
    "    for f in os.listdir(graph_dir):\n",
    "        key = graph_dir + \"/\" + f\n",
    "        labels[key] = 0\n",
    "        pred_label[key] = 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Time windows known to contain attack activity (ground truth).\n",
    "attack_list=[    \n",
    "    'graph_5_15/2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000.txt',\n",
    "    'graph_5_15/2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000.txt',\n",
    "    'graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt', \n",
    "    'graph_5_15/2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000.txt',    \n",
    "\n",
    "    'graph_5_17/2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000.txt', \n",
    "    'graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt', \n",
    "    'graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt', \n",
    "    #     'graph_5_17/2019-05-17 15:36:41.964000000~2019-05-17 15:51:43.493000000.txt',\n",
    "    'graph_5_17/2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000.txt', \n",
    "    'graph_5_17/2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000.txt', \n",
    "    'graph_5_17/2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000.txt'\n",
    "\n",
    "]\n",
    "\n",
    "# graph_5_17/2019-05-17 11:48:07.561000000~2019-05-17 12:03:08.991000000.txt  # might be anomalous\n",
    "# Mark every window: 1 when it is a known attack window, 0 otherwise.\n",
    "for window in labels:\n",
    "    labels[window] = 1 if window in attack_list else 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'graph_5_14/2019-05-14 12:02:56.496000000~2019-05-14 12:17:57.806000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 21:19:21.805000000~2019-05-14 22:39:40.614000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 15:20:00.233000000~2019-05-14 15:35:02.947000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 09:17:42.517000000~2019-05-14 09:32:44.093000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 15:50:03.958000000~2019-05-14 16:05:05.450000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 01:01:59.206000000~2019-05-14 01:17:00.513000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 08:47:39.890000000~2019-05-14 09:02:41.157000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 11:47:54.952000000~2019-05-14 12:02:56.496000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 06:32:28.279000000~2019-05-14 06:47:29.637000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 04:32:17.569000000~2019-05-14 04:47:18.740000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 08:17:37.204000000~2019-05-14 08:32:38.759000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 07:17:32.480000000~2019-05-14 07:32:33.922000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 12:32:59.282000000~2019-05-14 12:48:00.531000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 02:47:08.231000000~2019-05-14 03:02:09.643000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 02:17:05.742000000~2019-05-14 02:32:06.894000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 10:32:48.904000000~2019-05-14 10:47:49.773000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 12:48:00.531000000~2019-05-14 13:03:02.064000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 14:48:12.419000000~2019-05-14 15:04:01.280000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 10:17:47.485000000~2019-05-14 10:32:48.904000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 14:03:07.325000000~2019-05-14 14:18:08.816000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 12:17:57.806000000~2019-05-14 12:32:59.282000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 17:35:13.719000000~2019-05-14 17:50:26.379000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 11:02:51.213000000~2019-05-14 11:17:52.335000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 10:02:46.433000000~2019-05-14 10:17:47.485000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 03:17:10.873000000~2019-05-14 03:32:12.207000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 18:05:27.471000000~2019-05-14 18:20:29.235000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 13:33:03.664000000~2019-05-14 13:48:04.514000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 19:05:39.105000000~2019-05-14 19:22:38.029000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 06:47:29.637000000~2019-05-14 07:02:31.171000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 10:47:49.773000000~2019-05-14 11:02:51.213000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 18:20:29.235000000~2019-05-14 18:35:31.058000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 22:39:40.614000000~2019-05-14 23:59:43.171000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 04:47:18.740000000~2019-05-14 05:02:20.031000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 06:17:26.839000000~2019-05-14 06:32:28.279000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 05:32:22.709000000~2019-05-14 05:47:23.937000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 05:02:20.031000000~2019-05-14 05:17:21.435000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 09:02:41.157000000~2019-05-14 09:17:42.517000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 05:47:23.937000000~2019-05-14 06:02:25.486000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 14:33:09.949000000~2019-05-14 14:48:12.419000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 15:35:02.947000000~2019-05-14 15:50:03.958000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 13:03:02.064000000~2019-05-14 13:18:02.860000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 16:50:09.477000000~2019-05-14 17:05:10.964000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 03:02:09.643000000~2019-05-14 03:17:10.873000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 02:32:06.894000000~2019-05-14 02:47:08.231000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 01:32:01.927000000~2019-05-14 01:47:02.953000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 16:05:05.450000000~2019-05-14 16:20:06.873000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 13:48:04.514000000~2019-05-14 14:03:07.325000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 18:35:31.058000000~2019-05-14 18:50:32.147000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 00:46:57.832000000~2019-05-14 01:01:59.206000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 03:32:12.207000000~2019-05-14 03:47:13.548000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 11:17:52.335000000~2019-05-14 11:32:53.549000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 17:50:26.379000000~2019-05-14 18:05:27.471000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 07:32:33.922000000~2019-05-14 07:47:34.625000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 09:32:44.093000000~2019-05-14 09:47:44.971000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 00:00:00.110000000~2019-05-14 00:16:55.164000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 15:04:01.280000000~2019-05-14 15:20:00.233000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 19:22:38.029000000~2019-05-14 19:41:01.975000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 06:02:25.486000000~2019-05-14 06:17:26.839000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 01:47:02.953000000~2019-05-14 02:02:04.367000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 04:02:14.881000000~2019-05-14 04:17:16.184000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 09:47:44.971000000~2019-05-14 10:02:46.433000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 13:18:02.860000000~2019-05-14 13:33:03.664000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 07:02:31.171000000~2019-05-14 07:17:32.480000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 05:17:21.435000000~2019-05-14 05:32:22.709000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 01:17:00.513000000~2019-05-14 01:32:01.927000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 02:02:04.367000000~2019-05-14 02:17:05.742000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 17:05:10.964000000~2019-05-14 17:20:12.227000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 00:16:55.164000000~2019-05-14 00:31:56.512000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 11:32:53.549000000~2019-05-14 11:47:54.952000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 08:02:36.238000000~2019-05-14 08:17:37.204000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 19:59:18.996000000~2019-05-14 21:19:21.805000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 04:17:16.184000000~2019-05-14 04:32:17.569000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 08:32:38.759000000~2019-05-14 08:47:39.890000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 16:35:08.328000000~2019-05-14 16:50:09.477000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 19:41:01.975000000~2019-05-14 19:59:18.996000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 16:20:06.873000000~2019-05-14 16:35:08.328000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 14:18:08.816000000~2019-05-14 14:33:09.949000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 17:20:12.227000000~2019-05-14 17:35:13.719000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 00:31:56.512000000~2019-05-14 00:46:57.832000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 18:50:32.147000000~2019-05-14 19:05:39.105000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 03:47:13.548000000~2019-05-14 04:02:14.881000000.txt': 0,\n",
       " 'graph_5_14/2019-05-14 07:47:34.625000000~2019-05-14 08:02:36.238000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 21:14:28.862000000~2019-05-15 21:29:30.066000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 04:02:33.803000000~2019-05-15 05:22:53.612000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 20:14:24.590000000~2019-05-15 20:29:25.523000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 21:59:32.550000000~2019-05-15 22:14:33.792000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 18:44:17.260000000~2019-05-15 18:59:18.387000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 13:37:57.665000000~2019-05-15 13:52:58.745000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 20:29:25.523000000~2019-05-15 20:44:26.676000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 13:07:55.465000000~2019-05-15 13:22:56.671000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 23:14:38.671000000~2019-05-15 23:29:39.881000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 13:22:56.671000000~2019-05-15 13:37:57.665000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 22:59:37.524000000~2019-05-15 23:14:38.671000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 11:30:40.055000000~2019-05-15 12:37:07.284000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 15:08:41.249000000~2019-05-15 15:23:58.047000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 15:23:58.047000000~2019-05-15 15:38:59.175000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000.txt': 1,\n",
       " 'graph_5_15/2019-05-15 20:44:26.676000000~2019-05-15 20:59:27.020000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 21:44:31.381000000~2019-05-15 21:59:32.550000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 14:38:02.135000000~2019-05-15 14:53:31.210000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 17:11:55.171000000~2019-05-15 17:27:58.721000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 22:44:36.431000000~2019-05-15 22:59:37.524000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 16:41:48.642000000~2019-05-15 16:56:48.656000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 12:52:16.045000000~2019-05-15 13:07:55.465000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 08:02:58.920000000~2019-05-15 09:24:01.092000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000.txt': 1,\n",
       " 'graph_5_15/2019-05-15 19:29:20.800000000~2019-05-15 19:44:22.319000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 05:22:53.612000000~2019-05-15 06:42:56.141000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000.txt': 1,\n",
       " 'graph_5_15/2019-05-15 19:59:22.601000000~2019-05-15 20:14:24.590000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 00:00:01.169000000~2019-05-15 01:20:28.968000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 16:26:39.781000000~2019-05-15 16:41:48.642000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 17:27:58.721000000~2019-05-15 17:43:17.531000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 01:20:28.968000000~2019-05-15 02:41:31.305000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 20:59:27.020000000~2019-05-15 21:14:28.862000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 17:58:19.189000000~2019-05-15 18:14:08.311000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 22:29:35.074000000~2019-05-15 22:44:36.431000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 11:06:20.806000000~2019-05-15 11:30:40.055000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 13:52:58.745000000~2019-05-15 14:07:59.753000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 12:37:07.284000000~2019-05-15 12:52:16.045000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 19:14:19.745000000~2019-05-15 19:29:20.800000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 16:11:27.687000000~2019-05-15 16:26:39.781000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 02:41:31.305000000~2019-05-15 04:02:33.803000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 18:59:18.387000000~2019-05-15 19:14:19.745000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 17:43:17.531000000~2019-05-15 17:58:19.189000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 09:24:01.092000000~2019-05-15 10:46:03.642000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 16:56:48.656000000~2019-05-15 17:11:55.171000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 21:29:30.066000000~2019-05-15 21:44:31.381000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 14:53:31.210000000~2019-05-15 15:08:41.249000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt': 1,\n",
       " 'graph_5_15/2019-05-15 19:44:22.319000000~2019-05-15 19:59:22.601000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 18:29:15.893000000~2019-05-15 18:44:17.260000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 22:14:33.792000000~2019-05-15 22:29:35.074000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 23:29:39.881000000~2019-05-15 23:44:41.019000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 18:14:08.311000000~2019-05-15 18:29:15.893000000.txt': 0,\n",
       " 'graph_5_15/2019-05-15 06:42:56.141000000~2019-05-15 08:02:58.920000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 06:02:34.706000000~2019-05-17 06:17:36.113000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 07:32:42.668000000~2019-05-17 07:47:44.085000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 09:02:49.657000000~2019-05-17 09:17:51.260000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 10:17:59.598000000~2019-05-17 10:33:01.000000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 10:02:57.226000000~2019-05-17 10:17:59.598000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 11:48:07.561000000~2019-05-17 12:03:08.991000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 20:22:07.965000000~2019-05-17 20:37:09.206000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 22:07:18.054000000~2019-05-17 22:22:19.501000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 17:51:54.220000000~2019-05-17 18:06:55.854000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 22:37:20.816000000~2019-05-17 22:52:22.392000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 10:33:01.000000000~2019-05-17 10:48:02.004000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 04:47:28.259000000~2019-05-17 05:02:29.618000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 05:32:31.961000000~2019-05-17 05:47:33.352000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 15:36:41.964000000~2019-05-17 15:51:43.493000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 07:47:44.085000000~2019-05-17 08:02:45.378000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 14:05:21.553000000~2019-05-17 14:20:43.541000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 05:17:30.624000000~2019-05-17 05:32:31.961000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 08:32:47.467000000~2019-05-17 08:47:48.296000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 11:18:04.107000000~2019-05-17 11:33:05.441000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 00:47:07.880000000~2019-05-17 01:02:09.047000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 02:17:15.482000000~2019-05-17 02:32:16.882000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 20:07:06.424000000~2019-05-17 20:22:07.965000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 22:52:22.392000000~2019-05-17 23:07:23.887000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 18:36:58.618000000~2019-05-17 18:52:00.027000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 20:37:09.206000000~2019-05-17 20:52:10.643000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 23:07:23.887000000~2019-05-17 23:22:25.304000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 23:37:26.760000000~2019-05-17 23:52:28.261000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 17:21:51.831000000~2019-05-17 17:36:53.217000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 03:02:19.211000000~2019-05-17 03:17:20.492000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 07:02:40.131000000~2019-05-17 07:17:41.404000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 20:52:10.643000000~2019-05-17 21:07:12.192000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 09:47:52.897000000~2019-05-17 10:02:57.226000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 03:32:21.866000000~2019-05-17 03:47:23.177000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 19:37:03.821000000~2019-05-17 19:52:04.945000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 09:17:51.260000000~2019-05-17 09:32:52.628000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 03:47:23.177000000~2019-05-17 04:02:24.594000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 19:22:02.614000000~2019-05-17 19:37:03.821000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 02:02:14.158000000~2019-05-17 02:17:15.482000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 01:02:09.047000000~2019-05-17 01:17:10.410000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 17:06:50.973000000~2019-05-17 17:21:51.831000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 00:17:05.023000000~2019-05-17 00:32:06.529000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 21:07:12.192000000~2019-05-17 21:22:13.697000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 13:33:16.997000000~2019-05-17 13:50:01.226000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 01:47:12.976000000~2019-05-17 02:02:14.158000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 13:03:13.328000000~2019-05-17 13:18:15.486000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 08:02:45.378000000~2019-05-17 08:17:46.281000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 04:32:26.948000000~2019-05-17 04:47:28.259000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 10:48:02.004000000~2019-05-17 11:03:03.339000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 13:50:01.226000000~2019-05-17 14:05:21.553000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 18:21:57.246000000~2019-05-17 18:36:58.618000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 08:47:48.296000000~2019-05-17 09:02:49.657000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 06:47:38.801000000~2019-05-17 07:02:40.131000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 16:51:49.352000000~2019-05-17 17:06:50.973000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 03:17:20.492000000~2019-05-17 03:32:21.866000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 00:00:00.162000000~2019-05-17 00:17:05.023000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 21:22:13.697000000~2019-05-17 21:37:14.934000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 12:48:13.087000000~2019-05-17 13:03:13.328000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 22:22:19.501000000~2019-05-17 22:37:20.816000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 12:33:11.434000000~2019-05-17 12:48:13.087000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 16:06:44.953000000~2019-05-17 16:21:46.485000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 23:22:25.304000000~2019-05-17 23:37:26.760000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 06:17:36.113000000~2019-05-17 06:32:37.454000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 19:07:01.518000000~2019-05-17 19:22:02.614000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 11:33:05.441000000~2019-05-17 11:48:07.561000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 18:06:55.854000000~2019-05-17 18:21:57.246000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 04:17:25.489000000~2019-05-17 04:32:26.948000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000.txt': 1,\n",
       " 'graph_5_17/2019-05-17 02:32:16.882000000~2019-05-17 02:47:18.043000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 07:17:41.404000000~2019-05-17 07:32:42.668000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 04:02:24.594000000~2019-05-17 04:17:25.489000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 14:35:45.993000000~2019-05-17 14:50:52.897000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 05:02:29.618000000~2019-05-17 05:17:30.624000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 18:52:00.027000000~2019-05-17 19:07:01.518000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 12:03:08.991000000~2019-05-17 12:18:10.191000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 13:18:15.486000000~2019-05-17 13:33:16.997000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 01:32:11.778000000~2019-05-17 01:47:12.976000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 21:52:16.493000000~2019-05-17 22:07:18.054000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 17:36:53.217000000~2019-05-17 17:51:54.220000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 21:37:14.934000000~2019-05-17 21:52:16.493000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 08:17:46.281000000~2019-05-17 08:32:47.467000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 19:52:04.945000000~2019-05-17 20:07:06.424000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 11:03:03.339000000~2019-05-17 11:18:04.107000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 01:17:10.410000000~2019-05-17 01:32:11.778000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 02:47:18.043000000~2019-05-17 03:02:19.211000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 14:20:43.541000000~2019-05-17 14:35:45.993000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 12:18:10.191000000~2019-05-17 12:33:11.434000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 05:47:33.352000000~2019-05-17 06:02:34.706000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 06:32:37.454000000~2019-05-17 06:47:38.801000000.txt': 0,\n",
       " 'graph_5_17/2019-05-17 09:32:52.628000000~2019-05-17 09:47:52.897000000.txt': 0}"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "labels"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 5-14"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "index_count: 0\n",
      "thr: 1.2994436352680885\n",
      "graph_5_14/2019-05-14 00:00:00.110000000~2019-05-14 00:16:55.164000000.txt    2.3240000991915886  count: 1569  percentage: 0.0383056640625  node count: 409  edge count: 408\n",
      "index_count: 1\n",
      "thr: 1.4782385488367134\n",
      "graph_5_14/2019-05-14 00:16:55.164000000~2019-05-14 00:31:56.512000000.txt    1.8541332864371292  count: 1270  percentage: 0.09540264423076923  node count: 32  edge count: 35\n",
      "index_count: 2\n",
      "thr: 1.0407567938091484\n",
      "graph_5_14/2019-05-14 00:31:56.512000000~2019-05-14 00:46:57.832000000.txt    1.469677932960212  count: 835  percentage: 0.020908453525641024  node count: 106  edge count: 104\n",
      "index_count: 3\n",
      "thr: 1.5060143560294126\n",
      "graph_5_14/2019-05-14 00:46:57.832000000~2019-05-14 01:01:59.206000000.txt    2.199582764401979  count: 735  percentage: 0.059814453125  node count: 29  edge count: 28\n",
      "index_count: 4\n",
      "thr: 1.0218455520756344\n",
      "graph_5_14/2019-05-14 01:01:59.206000000~2019-05-14 01:17:00.513000000.txt    1.2468562588354621  count: 815  percentage: 0.020407652243589744  node count: 22  edge count: 22\n",
      "index_count: 5\n",
      "thr: 1.1927506247552309\n",
      "graph_5_14/2019-05-14 01:17:00.513000000~2019-05-14 01:32:01.927000000.txt    2.034307595324343  count: 319  percentage: 0.023963341346153848  node count: 17  edge count: 26\n",
      "index_count: 6\n",
      "thr: 1.041212670822445\n",
      "graph_5_14/2019-05-14 01:32:01.927000000~2019-05-14 01:47:02.953000000.txt    1.5033491310401264  count: 590  percentage: 0.015572212837837838  node count: 112  edge count: 107\n",
      "index_count: 7\n",
      "thr: 1.1223935917671186\n",
      "graph_5_14/2019-05-14 01:47:02.953000000~2019-05-14 02:02:04.367000000.txt    1.4097186799061696  count: 404  percentage: 0.032877604166666664  node count: 47  edge count: 44\n",
      "index_count: 8\n",
      "thr: 1.015619896257226\n",
      "graph_5_14/2019-05-14 02:02:04.367000000~2019-05-14 02:17:05.742000000.txt    1.3721730392863938  count: 607  percentage: 0.01519931891025641  node count: 46  edge count: 43\n",
      "index_count: 9\n",
      "thr: 1.0221815161817256\n",
      "graph_5_14/2019-05-14 02:17:05.742000000~2019-05-14 02:32:06.894000000.txt    1.6232139234722238  count: 69  percentage: 0.005183293269230769  node count: 10  edge count: 8\n",
      "index_count: 10\n",
      "thr: 0.9975046209922468\n",
      "graph_5_14/2019-05-14 02:32:06.894000000~2019-05-14 02:47:08.231000000.txt    1.930500884661266  count: 115  percentage: 0.002879607371794872  node count: 38  edge count: 37\n",
      "index_count: 11\n",
      "thr: 1.0136117413011592\n",
      "graph_5_14/2019-05-14 02:47:08.231000000~2019-05-14 03:02:09.643000000.txt    1.4879617040614308  count: 77  percentage: 0.006266276041666667  node count: 21  edge count: 18\n",
      "index_count: 12\n",
      "thr: 1.049529435548738\n",
      "graph_5_14/2019-05-14 03:02:09.643000000~2019-05-14 03:17:10.873000000.txt    1.4711645044114652  count: 438  percentage: 0.012961647727272728  node count: 37  edge count: 34\n",
      "index_count: 13\n",
      "thr: 1.0237597712935047\n",
      "graph_5_14/2019-05-14 03:17:10.873000000~2019-05-14 03:32:12.207000000.txt    1.3675279548003048  count: 155  percentage: 0.011643629807692308  node count: 14  edge count: 13\n",
      "index_count: 14\n",
      "thr: 1.0701814711338131\n",
      "graph_5_14/2019-05-14 03:32:12.207000000~2019-05-14 03:47:13.548000000.txt    1.3938788206606791  count: 1306  percentage: 0.036439732142857145  node count: 253  edge count: 251\n",
      "index_count: 15\n",
      "thr: 1.4801905624334672\n",
      "graph_5_14/2019-05-14 03:47:13.548000000~2019-05-14 04:02:14.881000000.txt    2.018996669950968  count: 855  percentage: 0.069580078125  node count: 82  edge count: 82\n",
      "index_count: 16\n",
      "thr: 1.04550630521608\n",
      "graph_5_14/2019-05-14 04:02:14.881000000~2019-05-14 04:17:16.184000000.txt    1.7396320523670863  count: 347  percentage: 0.011685075431034482  node count: 41  edge count: 43\n",
      "index_count: 17\n",
      "thr: 1.051396156712218\n",
      "graph_5_14/2019-05-14 04:17:16.184000000~2019-05-14 04:32:17.569000000.txt    1.5124057644434468  count: 158  percentage: 0.011868990384615384  node count: 16  edge count: 17\n",
      "index_count: 18\n",
      "thr: 1.016524737721394\n",
      "graph_5_14/2019-05-14 04:32:17.569000000~2019-05-14 04:47:18.740000000.txt    1.841415018041551  count: 141  percentage: 0.004748114224137931  node count: 43  edge count: 40\n",
      "index_count: 19\n",
      "thr: 1.5989825304623677\n",
      "graph_5_14/2019-05-14 04:47:18.740000000~2019-05-14 05:02:20.031000000.txt    2.8802117744306734  count: 550  percentage: 0.044759114583333336  node count: 29  edge count: 29\n",
      "index_count: 20\n",
      "thr: 1.1443597381568074\n",
      "graph_5_14/2019-05-14 05:02:20.031000000~2019-05-14 05:17:21.435000000.txt    1.9832610676815408  count: 691  percentage: 0.023269127155172414  node count: 80  edge count: 77\n",
      "index_count: 21\n",
      "thr: 1.1339102084105437\n",
      "graph_5_14/2019-05-14 05:17:21.435000000~2019-05-14 05:32:22.709000000.txt    2.513957474992929  count: 75  percentage: 0.006103515625  node count: 14  edge count: 11\n",
      "index_count: 22\n",
      "thr: 1.2345208533782566\n",
      "graph_5_14/2019-05-14 05:32:22.709000000~2019-05-14 05:47:23.937000000.txt    2.501240612414151  count: 711  percentage: 0.02394261853448276  node count: 41  edge count: 37\n",
      "index_count: 23\n",
      "thr: 1.2839779358655894\n",
      "graph_5_14/2019-05-14 05:47:23.937000000~2019-05-14 06:02:25.486000000.txt    1.505336853239359  count: 960  percentage: 0.07211538461538461  node count: 49  edge count: 47\n",
      "index_count: 24\n",
      "thr: 1.0335252989437227\n",
      "graph_5_14/2019-05-14 06:02:25.486000000~2019-05-14 06:17:26.839000000.txt    2.305296696913426  count: 105  percentage: 0.003307711693548387  node count: 35  edge count: 30\n",
      "index_count: 25\n",
      "thr: 1.1093897075752996\n",
      "graph_5_14/2019-05-14 06:17:26.839000000~2019-05-14 06:32:28.279000000.txt    1.403174462521928  count: 368  percentage: 0.029947916666666668  node count: 25  edge count: 24\n",
      "index_count: 26\n",
      "thr: 1.0406472055267384\n",
      "graph_5_14/2019-05-14 06:32:28.279000000~2019-05-14 06:47:29.637000000.txt    1.4078292866697961  count: 833  percentage: 0.0232421875  node count: 50  edge count: 50\n",
      "index_count: 27\n",
      "thr: 1.1759414335510334\n",
      "graph_5_14/2019-05-14 06:47:29.637000000~2019-05-14 07:02:31.171000000.txt    1.4252814302965857  count: 686  percentage: 0.05153245192307692  node count: 21  edge count: 19\n",
      "index_count: 28\n",
      "thr: 1.0167501875222555\n",
      "graph_5_14/2019-05-14 07:02:31.171000000~2019-05-14 07:17:32.480000000.txt    1.5691343135245805  count: 270  percentage: 0.007126266891891892  node count: 40  edge count: 40\n",
      "index_count: 29\n",
      "thr: 1.0812892648733814\n",
      "graph_5_14/2019-05-14 07:17:32.480000000~2019-05-14 07:32:33.922000000.txt    1.722395196915584  count: 203  percentage: 0.016520182291666668  node count: 18  edge count: 20\n",
      "index_count: 30\n",
      "thr: 1.0158302979623088\n",
      "graph_5_14/2019-05-14 07:32:33.922000000~2019-05-14 07:47:34.625000000.txt    1.6304008821894245  count: 335  percentage: 0.007979230182926829  node count: 41  edge count: 41\n",
      "index_count: 31\n",
      "thr: 1.0565793615273096\n",
      "graph_5_14/2019-05-14 07:47:34.625000000~2019-05-14 08:02:36.238000000.txt    1.4913700680905202  count: 190  percentage: 0.014272836538461538  node count: 17  edge count: 19\n",
      "index_count: 32\n",
      "thr: 1.0191238752816556\n",
      "graph_5_14/2019-05-14 08:02:36.238000000~2019-05-14 08:17:37.204000000.txt    1.6523678698963309  count: 355  percentage: 0.008455602134146341  node count: 37  edge count: 41\n",
      "index_count: 33\n",
      "thr: 1.1995904177812968\n",
      "graph_5_14/2019-05-14 08:17:37.204000000~2019-05-14 08:32:38.759000000.txt    2.355883966698477  count: 255  percentage: 0.01915564903846154  node count: 15  edge count: 13\n",
      "index_count: 34\n",
      "thr: 1.04768053123508\n",
      "graph_5_14/2019-05-14 08:32:38.759000000~2019-05-14 08:47:39.890000000.txt    1.6163960620065425  count: 666  percentage: 0.015125363372093024  node count: 45  edge count: 46\n",
      "index_count: 35\n",
      "thr: 1.0305298813368784\n",
      "graph_5_14/2019-05-14 08:47:39.890000000~2019-05-14 09:02:41.157000000.txt    1.774164175512873  count: 54  percentage: 0.00439453125  node count: 13  edge count: 10\n",
      "index_count: 36\n",
      "thr: 1.522408105098941\n",
      "graph_5_14/2019-05-14 09:02:41.157000000~2019-05-14 09:17:42.517000000.txt    1.8183248257547804  count: 14367  percentage: 0.052745388862781954  node count: 563  edge count: 579\n",
      "index_count: 37\n",
      "thr: 1.5088727091071938\n",
      "graph_5_14/2019-05-14 09:17:42.517000000~2019-05-14 09:32:44.093000000.txt    1.7087788596852138  count: 8818  percentage: 0.038103221792035395  node count: 347  edge count: 351\n",
      "index_count: 38\n",
      "thr: 1.4106448202851956\n",
      "graph_5_14/2019-05-14 09:32:44.093000000~2019-05-14 09:47:44.971000000.txt    1.6135813016935365  count: 8542  percentage: 0.057529633620689656  node count: 469  edge count: 479\n",
      "index_count: 39\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "thr: 1.5154519355211842\n",
      "graph_5_14/2019-05-14 09:47:44.971000000~2019-05-14 10:02:46.433000000.txt    1.68470143776757  count: 11606  percentage: 0.03976836622807017  node count: 528  edge count: 542\n",
      "index_count: 40\n",
      "thr: 1.4950500794685122\n",
      "graph_5_14/2019-05-14 10:02:46.433000000~2019-05-14 10:17:47.485000000.txt    1.7302317450874771  count: 16468  percentage: 0.04814979416167665  node count: 596  edge count: 607\n",
      "index_count: 41\n",
      "thr: 1.499693620907286\n",
      "graph_5_14/2019-05-14 10:17:47.485000000~2019-05-14 10:32:48.904000000.txt    1.7762107164568588  count: 5637  percentage: 0.041390096334586464  node count: 485  edge count: 492\n",
      "index_count: 42\n",
      "thr: 1.4340534199578903\n",
      "graph_5_14/2019-05-14 10:32:48.904000000~2019-05-14 10:47:49.773000000.txt    1.714631811360223  count: 8303  percentage: 0.04884577371987952  node count: 457  edge count: 468\n",
      "index_count: 43\n",
      "thr: 1.5123144511343576\n",
      "graph_5_14/2019-05-14 10:47:49.773000000~2019-05-14 11:02:51.213000000.txt    1.6992958948041406  count: 13496  percentage: 0.04210762779552716  node count: 461  edge count: 458\n",
      "index_count: 44\n",
      "thr: 1.389903587005385\n",
      "graph_5_14/2019-05-14 11:02:51.213000000~2019-05-14 11:17:52.335000000.txt    1.5894979202460326  count: 7573  percentage: 0.04963428062080537  node count: 397  edge count: 405\n",
      "index_count: 45\n",
      "thr: 1.5046872876052066\n",
      "graph_5_14/2019-05-14 11:17:52.335000000~2019-05-14 11:32:53.549000000.txt    1.69137786706089  count: 8013  percentage: 0.043473307291666666  node count: 384  edge count: 386\n",
      "index_count: 46\n",
      "thr: 1.3021493016321415\n",
      "graph_5_14/2019-05-14 11:32:53.549000000~2019-05-14 11:47:54.952000000.txt    1.5551216598562456  count: 7924  percentage: 0.07440655048076923  node count: 376  edge count: 392\n",
      "index_count: 47\n",
      "thr: 1.5482483041095292\n",
      "graph_5_14/2019-05-14 11:47:54.952000000~2019-05-14 12:02:56.496000000.txt    1.812951072881615  count: 12981  percentage: 0.04183748452970297  node count: 606  edge count: 627\n",
      "index_count: 48\n",
      "thr: 1.4822501461958089\n",
      "graph_5_14/2019-05-14 12:02:56.496000000~2019-05-14 12:17:57.806000000.txt    1.6920766038365236  count: 10642  percentage: 0.040281310562015504  node count: 270  edge count: 271\n",
      "index_count: 49\n",
      "thr: 1.521636747565153\n",
      "graph_5_14/2019-05-14 12:17:57.806000000~2019-05-14 12:32:59.282000000.txt    1.7758472632791256  count: 6132  percentage: 0.04502467105263158  node count: 155  edge count: 159\n",
      "index_count: 50\n",
      "thr: 1.3717089470267272\n",
      "graph_5_14/2019-05-14 12:32:59.282000000~2019-05-14 12:48:00.531000000.txt    1.8723860601369664  count: 3052  percentage: 0.06623263888888889  node count: 288  edge count: 293\n",
      "index_count: 51\n",
      "thr: 1.4992681516406197\n",
      "graph_5_14/2019-05-14 12:48:00.531000000~2019-05-14 13:03:02.064000000.txt    1.8748037391088324  count: 2238  percentage: 0.04751188858695652  node count: 333  edge count: 338\n",
      "index_count: 52\n",
      "thr: 1.4531028830233155\n",
      "graph_5_14/2019-05-14 13:03:02.064000000~2019-05-14 13:18:02.860000000.txt    1.6928199085900713  count: 10238  percentage: 0.049741526741293535  node count: 476  edge count: 477\n",
      "index_count: 53\n",
      "thr: 1.523297825486913\n",
      "graph_5_14/2019-05-14 13:18:02.860000000~2019-05-14 13:33:03.664000000.txt    1.73165366336333  count: 15799  percentage: 0.04147502940188172  node count: 426  edge count: 430\n",
      "index_count: 54\n",
      "thr: 1.514452622409415\n",
      "graph_5_14/2019-05-14 13:33:03.664000000~2019-05-14 13:48:04.514000000.txt    1.7610804442045898  count: 16212  percentage: 0.04562545028818444  node count: 787  edge count: 793\n",
      "index_count: 55\n",
      "thr: 1.5233268525552992\n",
      "graph_5_14/2019-05-14 13:48:04.514000000~2019-05-14 14:03:07.325000000.txt    1.7012332857803085  count: 12758  percentage: 0.04386966329225352  node count: 487  edge count: 493\n",
      "index_count: 56\n",
      "thr: 1.4608529936949837\n",
      "graph_5_14/2019-05-14 14:03:07.325000000~2019-05-14 14:18:08.816000000.txt    1.8027048715182519  count: 5175  percentage: 0.03948211669921875  node count: 172  edge count: 173\n",
      "index_count: 57\n",
      "thr: 1.5181189346959576\n",
      "graph_5_14/2019-05-14 14:18:08.816000000~2019-05-14 14:33:09.949000000.txt    1.7777197013206774  count: 6139  percentage: 0.03294020432692308  node count: 217  edge count: 220\n",
      "index_count: 58\n",
      "thr: 1.505173295297928\n",
      "graph_5_14/2019-05-14 14:33:09.949000000~2019-05-14 14:48:12.419000000.txt    1.7289170960396285  count: 15335  percentage: 0.04315730817723343  node count: 640  edge count: 652\n",
      "index_count: 59\n",
      "thr: 1.4829033073802163\n",
      "graph_5_14/2019-05-14 14:48:12.419000000~2019-05-14 15:04:01.280000000.txt    1.6798693699197518  count: 19817  percentage: 0.05013611156088083  node count: 679  edge count: 683\n",
      "index_count: 60\n",
      "thr: 1.454544870154166\n",
      "graph_5_14/2019-05-14 15:04:01.280000000~2019-05-14 15:20:00.233000000.txt    1.661869537847787  count: 13580  percentage: 0.04822443181818182  node count: 399  edge count: 408\n",
      "index_count: 61\n",
      "thr: 1.5074282132550931\n",
      "graph_5_14/2019-05-14 15:20:00.233000000~2019-05-14 15:35:02.947000000.txt    1.8213523217232706  count: 3118  percentage: 0.05160884533898305  node count: 185  edge count: 187\n",
      "index_count: 62\n",
      "thr: 1.3241433179679003\n",
      "graph_5_14/2019-05-14 15:35:02.947000000~2019-05-14 15:50:03.958000000.txt    1.9403147297761154  count: 3087  percentage: 0.0538330078125  node count: 409  edge count: 412\n",
      "index_count: 63\n",
      "thr: 1.555146674143474\n",
      "graph_5_14/2019-05-14 15:50:03.958000000~2019-05-14 16:05:05.450000000.txt    1.8936480315586863  count: 5726  percentage: 0.0436859130859375  node count: 330  edge count: 332\n",
      "index_count: 64\n",
      "thr: 1.4797602890735575\n",
      "graph_5_14/2019-05-14 16:05:05.450000000~2019-05-14 16:20:06.873000000.txt    1.8083895355025348  count: 13932  percentage: 0.056926647489539746  node count: 463  edge count: 477\n",
      "index_count: 65\n",
      "thr: 1.5353652438022871\n",
      "graph_5_14/2019-05-14 16:20:06.873000000~2019-05-14 16:35:08.328000000.txt    1.7459537263204472  count: 12775  percentage: 0.04316811743079585  node count: 512  edge count: 530\n",
      "index_count: 66\n",
      "thr: 1.434608050714981\n",
      "graph_5_14/2019-05-14 16:35:08.328000000~2019-05-14 16:50:09.477000000.txt    1.7331544087978492  count: 7706  percentage: 0.04855090725806452  node count: 399  edge count: 408\n",
      "index_count: 67\n",
      "thr: 1.5166793559153868\n",
      "graph_5_14/2019-05-14 16:50:09.477000000~2019-05-14 17:05:10.964000000.txt    1.699914429925953  count: 16557  percentage: 0.03795527068661972  node count: 667  edge count: 675\n",
      "index_count: 68\n",
      "thr: 1.402579088009424\n",
      "graph_5_14/2019-05-14 17:05:10.964000000~2019-05-14 17:20:12.227000000.txt    1.618930465858621  count: 8963  percentage: 0.05720869076797386  node count: 160  edge count: 158\n",
      "index_count: 69\n",
      "thr: 1.5522721174903222\n",
      "graph_5_14/2019-05-14 17:20:12.227000000~2019-05-14 17:35:13.719000000.txt    2.0483095422788327  count: 1806  percentage: 0.083984375  node count: 218  edge count: 217\n",
      "index_count: 70\n",
      "thr: 1.4467083388754696\n",
      "graph_5_14/2019-05-14 17:35:13.719000000~2019-05-14 17:50:26.379000000.txt    1.6943482102480965  count: 12991  percentage: 0.05638454861111111  node count: 290  edge count: 292\n",
      "index_count: 71\n",
      "thr: 1.5452902191343498\n",
      "graph_5_14/2019-05-14 17:50:26.379000000~2019-05-14 18:05:27.471000000.txt    1.847834535396021  count: 6576  percentage: 0.04143145161290323  node count: 301  edge count: 305\n",
      "index_count: 72\n",
      "thr: 1.6291409521958804\n",
      "graph_5_14/2019-05-14 18:05:27.471000000~2019-05-14 18:20:29.235000000.txt    2.5268278579528465  count: 10327  percentage: 0.033174213610197366  node count: 999  edge count: 1045\n",
      "index_count: 73\n",
      "thr: 1.5356393842712448\n",
      "graph_5_14/2019-05-14 18:20:29.235000000~2019-05-14 18:35:31.058000000.txt    1.7713284772024656  count: 11992  percentage: 0.03753505608974359  node count: 585  edge count: 589\n",
      "index_count: 74\n",
      "thr: 1.471742365457059\n",
      "graph_5_14/2019-05-14 18:35:31.058000000~2019-05-14 18:50:32.147000000.txt    1.8594974367304773  count: 7513  percentage: 0.05394789751838235  node count: 542  edge count: 556\n",
      "index_count: 75\n",
      "thr: 1.4408450324182187\n",
      "graph_5_14/2019-05-14 18:50:32.147000000~2019-05-14 19:05:39.105000000.txt    1.6485577472248925  count: 5618  percentage: 0.04496990266393443  node count: 317  edge count: 321\n",
      "index_count: 76\n",
      "thr: 1.4198378993611551\n",
      "graph_5_14/2019-05-14 19:05:39.105000000~2019-05-14 19:22:38.029000000.txt    1.7492506533574999  count: 686  percentage: 0.02576622596153846  node count: 47  edge count: 48\n",
      "index_count: 77\n",
      "thr: 1.6025207439273688\n",
      "graph_5_14/2019-05-14 19:22:38.029000000~2019-05-14 19:41:01.975000000.txt    2.2431860253140274  count: 101  percentage: 0.016438802083333332  node count: 4  edge count: 5\n",
      "index_count: 78\n",
      "thr: 1.0370682054821279\n",
      "graph_5_14/2019-05-14 19:41:01.975000000~2019-05-14 19:59:18.996000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 79\n",
      "thr: 1.2968502974361322\n",
      "graph_5_14/2019-05-14 19:59:18.996000000~2019-05-14 21:19:21.805000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 80\n",
      "thr: 1.3251496559310563\n",
      "graph_5_14/2019-05-14 21:19:21.805000000~2019-05-14 22:39:40.614000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 81\n",
      "thr: 1.2987624305527317\n",
      "graph_5_14/2019-05-14 22:39:40.614000000~2019-05-14 23:59:43.171000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n"
     ]
    }
   ],
   "source": [
    "# node_IDF=torch.load(\"node_IDF\")\n",
    "y_data_5_14=[]\n",
    "df_list_5_14=[]\n",
    "# node_set_list=[]\n",
    "history_list_5_14=[]\n",
    "tw_que=[]\n",
    "his_tw={}\n",
    "current_tw={}\n",
    "\n",
    "file_path_list=[]\n",
    "\n",
    "file_path=\"graph_5_14/\"\n",
    "file_l=os.listdir(\"graph_5_14/\")\n",
    "for i in file_l:\n",
    "    file_path_list.append(file_path+i)\n",
    "\n",
    "index_count=0\n",
    "for f_path in sorted(file_path_list):\n",
    "    f=open(f_path)\n",
    "    edge_loss_list=[]\n",
    "    edge_list=[]\n",
    "    print('index_count:',index_count)\n",
    "    \n",
    "    for line in f:\n",
    "        l=line.strip()\n",
    "        jdata=eval(l)\n",
    "        edge_loss_list.append(jdata['loss'])\n",
    "        edge_list.append([str(jdata['srcmsg']),str(jdata['dstmsg'])])\n",
    "    df_list_5_14.append(pd.DataFrame(edge_loss_list))\n",
    "    count,loss_avg,node_set,edge_set=cal_anomaly_loss(edge_loss_list,edge_list,\"graph_5_14/\")\n",
    "\n",
    "    current_tw['name']=f_path\n",
    "    current_tw['loss']=loss_avg\n",
    "    current_tw['index']=index_count\n",
    "    current_tw['nodeset']=node_set\n",
    "\n",
    "    added_que_flag=False\n",
    "    for hq in history_list_5_14:\n",
    "        for his_tw in hq:\n",
    "            if cal_set_rel_bak(current_tw['nodeset'],his_tw['nodeset'],file_list_5_9_12)!=0 and current_tw['name']!=his_tw['name']:\n",
    "                print(\"history queue:\",his_tw['name'])\n",
    "                hq.append(copy.deepcopy(current_tw))\n",
    "                added_que_flag=True\n",
    "                break\n",
    "            if added_que_flag:\n",
    "                break\n",
    "    if added_que_flag is False:\n",
    "        temp_hq=[copy.deepcopy(current_tw)]\n",
    "        history_list_5_14.append(temp_hq)\n",
    "    index_count+=1\n",
    "    print( f_path,\"  \",loss_avg,\" count:\",count,\" percentage:\",count/len(edge_list),\" node count:\",len(node_set),\" edge count:\",len(edge_set))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "scrolled": false
   },
   "outputs": [],
   "source": [
    "name_list=[]\n",
    "for hl in history_list_5_14:\n",
    "    loss_count=0\n",
    "    for hq in hl:\n",
    "        if loss_count==0:\n",
    "            loss_count=(loss_count+1)*(hq['loss']+1)\n",
    "        else:\n",
    "            loss_count=(loss_count)*(hq['loss']+1)\n",
    "#     name_list=[]\n",
    "    if loss_count>100:\n",
    "        name_list=[]\n",
    "        for i in hl:\n",
    "            name_list.append(i['name']) \n",
    "        print(name_list)\n",
    "        for i in name_list:\n",
    "            pred_label[i]=1\n",
    "        print(loss_count)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 5-15"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "index_count: 0\n",
      "thr: 3.1683880261853683\n",
      "graph_5_15/2019-05-15 00:00:01.169000000~2019-05-15 01:20:28.968000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 1\n",
      "thr: 2.087508243346082\n",
      "graph_5_15/2019-05-15 01:20:28.968000000~2019-05-15 02:41:31.305000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 2\n",
      "thr: 1.6156690263165976\n",
      "graph_5_15/2019-05-15 02:41:31.305000000~2019-05-15 04:02:33.803000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 3\n",
      "thr: 2.40459458662835\n",
      "graph_5_15/2019-05-15 04:02:33.803000000~2019-05-15 05:22:53.612000000.txt    2.5836529671984367  count: 160  percentage: 0.15625  node count: 2  edge count: 1\n",
      "index_count: 4\n",
      "thr: 2.484223075000419\n",
      "graph_5_15/2019-05-15 05:22:53.612000000~2019-05-15 06:42:56.141000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 5\n",
      "thr: 1.8504494380717709\n",
      "graph_5_15/2019-05-15 06:42:56.141000000~2019-05-15 08:02:58.920000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 6\n",
      "thr: 2.277336484137706\n",
      "graph_5_15/2019-05-15 08:02:58.920000000~2019-05-15 09:24:01.092000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 7\n",
      "thr: 1.8354783997166941\n",
      "graph_5_15/2019-05-15 09:24:01.092000000~2019-05-15 10:46:03.642000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 8\n",
      "thr: 2.3694874175446374\n",
      "graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt    2.5479050094572155  count: 434  percentage: 0.14127604166666666  node count: 122  edge count: 120\n",
      "index_count: 9\n",
      "thr: 2.354432001568995\n",
      "graph_5_15/2019-05-15 11:06:20.806000000~2019-05-15 11:30:40.055000000.txt    3.350093695007201  count: 192  percentage: 0.0625  node count: 4  edge count: 4\n",
      "index_count: 10\n",
      "thr: 5.02757443509922\n",
      "graph_5_15/2019-05-15 11:30:40.055000000~2019-05-15 12:37:07.284000000.txt    0.0  count: 0  percentage: 0.0  node count: 0  edge count: 0\n",
      "index_count: 11\n",
      "thr: 3.065010589849819\n",
      "graph_5_15/2019-05-15 12:37:07.284000000~2019-05-15 12:52:16.045000000.txt    3.960514572464863  count: 1632  percentage: 0.12259615384615384  node count: 149  edge count: 149\n",
      "index_count: 12\n",
      "thr: 1.8381475089257016\n",
      "node: {'subject': '/system/bin/folio_daemon'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 12:37:07.284000000~2019-05-15 12:52:16.045000000.txt\n",
      "graph_5_15/2019-05-15 12:52:16.045000000~2019-05-15 13:07:55.465000000.txt    2.685575919935083  count: 6123  percentage: 0.06794877485795454  node count: 639  edge count: 736\n",
      "index_count: 13\n",
      "thr: 0.9968586352434362\n",
      "graph_5_15/2019-05-15 13:07:55.465000000~2019-05-15 13:22:56.671000000.txt    1.9328686833140043  count: 80  percentage: 0.0032552083333333335  node count: 19  edge count: 17\n",
      "index_count: 14\n",
      "thr: 1.0288386259839826\n",
      "graph_5_15/2019-05-15 13:22:56.671000000~2019-05-15 13:37:57.665000000.txt    1.6649496219528916  count: 103  percentage: 0.008382161458333334  node count: 14  edge count: 11\n",
      "index_count: 15\n",
      "thr: 1.0756124161489538\n",
      "graph_5_15/2019-05-15 13:37:57.665000000~2019-05-15 13:52:58.745000000.txt    1.9375446686374247  count: 284  percentage: 0.012058423913043478  node count: 17  edge count: 15\n",
      "index_count: 16\n",
      "thr: 1.0440914299005783\n",
      "graph_5_15/2019-05-15 13:52:58.745000000~2019-05-15 14:07:59.753000000.txt    1.9060561607122528  count: 120  percentage: 0.009765625  node count: 13  edge count: 12\n",
      "index_count: 17\n",
      "thr: 1.5106921259868908\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt\n",
      "graph_5_15/2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000.txt    1.9554665824168587  count: 4624  percentage: 0.047037760416666664  node count: 467  edge count: 495\n",
      "index_count: 18\n",
      "thr: 1.228223561926078\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt\n",
      "graph_5_15/2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000.txt    1.5813193935096492  count: 969  percentage: 0.0630859375  node count: 93  edge count: 95\n",
      "index_count: 19\n",
      "thr: 1.5056031833950294\n",
      "graph_5_15/2019-05-15 14:38:02.135000000~2019-05-15 14:53:31.210000000.txt    1.6833059348732557  count: 14291  percentage: 0.05038286890794224  node count: 510  edge count: 533\n",
      "index_count: 20\n",
      "thr: 1.582303551004248\n",
      "graph_5_15/2019-05-15 14:53:31.210000000~2019-05-15 15:08:41.249000000.txt    1.9583852159456374  count: 9037  percentage: 0.055504373034591194  node count: 850  edge count: 909\n",
      "index_count: 21\n",
      "thr: 1.4592005751549921\n",
      "graph_5_15/2019-05-15 15:08:41.249000000~2019-05-15 15:23:58.047000000.txt    1.6634577160500421  count: 6569  percentage: 0.03818475632440476  node count: 243  edge count: 240\n",
      "index_count: 22\n",
      "thr: 1.4992485127029904\n",
      "graph_5_15/2019-05-15 15:23:58.047000000~2019-05-15 15:38:59.175000000.txt    1.7164922914898035  count: 4421  percentage: 0.03721881734913793  node count: 263  edge count: 261\n",
      "index_count: 23\n",
      "thr: 1.4916891919094828\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt\n",
      "node: {'file': '/config/sdcardfs/de.belu.appstarter'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 12:37:07.284000000~2019-05-15 12:52:16.045000000.txt\n",
      "graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt    1.8085579376757788  count: 6625  percentage: 0.05054473876953125  node count: 495  edge count: 515\n",
      "index_count: 24\n",
      "thr: 1.0860871413745272\n",
      "node: {'subject': '/data/data/de.belu.appstarter/busybox'}  IDF: 5.030437921392435\n",
      "node: {'subject': '/system/bin/toybox'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt\n",
      "graph_5_15/2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000.txt    1.8298831787165724  count: 183  percentage: 0.013746995192307692  node count: 55  edge count: 50\n",
      "index_count: 25\n",
      "thr: 1.7445038536692459\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt\n",
      "graph_5_15/2019-05-15 16:11:27.687000000~2019-05-15 16:26:39.781000000.txt    3.0597087734778796  count: 5070  percentage: 0.039609375  node count: 493  edge count: 515\n",
      "index_count: 26\n",
      "thr: 1.517070322021484\n",
      "graph_5_15/2019-05-15 16:26:39.781000000~2019-05-15 16:41:48.642000000.txt    1.6878211520867499  count: 18877  percentage: 0.04410184285287081  node count: 618  edge count: 618\n",
      "index_count: 27\n",
      "thr: 1.639856402753868\n",
      "node: {'file': '/data/app/vmdl1842893733.tmp/base.apk'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_15/2019-05-15 16:26:39.781000000~2019-05-15 16:41:48.642000000.txt\n",
      "graph_5_15/2019-05-15 16:41:48.642000000~2019-05-15 16:56:48.656000000.txt    2.421436002604467  count: 10777  percentage: 0.037994274593862815  node count: 533  edge count: 548\n",
      "index_count: 28\n",
      "thr: 1.6201382298092042\n",
      "graph_5_15/2019-05-15 16:56:48.656000000~2019-05-15 17:11:55.171000000.txt    2.3069503337972255  count: 3502  percentage: 0.037999131944444445  node count: 442  edge count: 489\n",
      "index_count: 29\n",
      "thr: 1.6646893430444862\n",
      "graph_5_15/2019-05-15 17:11:55.171000000~2019-05-15 17:27:58.721000000.txt    2.794901586092996  count: 4957  percentage: 0.033156303510273974  node count: 514  edge count: 535\n",
      "index_count: 30\n",
      "thr: 1.517340683921694\n",
      "graph_5_15/2019-05-15 17:27:58.721000000~2019-05-15 17:43:17.531000000.txt    1.7598939543112404  count: 10377  percentage: 0.051703005420918366  node count: 716  edge count: 759\n",
      "index_count: 31\n",
      "thr: 1.5339353507959217\n",
      "graph_5_15/2019-05-15 17:43:17.531000000~2019-05-15 17:58:19.189000000.txt    2.0757285184222938  count: 8697  percentage: 0.04122895175970874  node count: 572  edge count: 593\n",
      "index_count: 32\n",
      "thr: 1.4916978163393368\n",
      "graph_5_15/2019-05-15 17:58:19.189000000~2019-05-15 18:14:08.311000000.txt    1.7312421563984437  count: 15602  percentage: 0.05011950041118421  node count: 644  edge count: 648\n",
      "index_count: 33\n",
      "thr: 1.4990912766780677\n",
      "graph_5_15/2019-05-15 18:14:08.311000000~2019-05-15 18:29:15.893000000.txt    1.7339624606305437  count: 14536  percentage: 0.05377012310606061  node count: 535  edge count: 536\n",
      "index_count: 34\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "thr: 1.537049552112133\n",
      "graph_5_15/2019-05-15 18:29:15.893000000~2019-05-15 18:44:17.260000000.txt    1.7855619591379732  count: 14272  percentage: 0.04756825938566553  node count: 872  edge count: 904\n",
      "index_count: 35\n",
      "thr: 1.4427678158666049\n",
      "graph_5_15/2019-05-15 18:44:17.260000000~2019-05-15 18:59:18.387000000.txt    1.6951824174970394  count: 5826  percentage: 0.043430939885496185  node count: 281  edge count: 285\n",
      "index_count: 36\n",
      "thr: 1.5122281101908\n",
      "graph_5_15/2019-05-15 18:59:18.387000000~2019-05-15 19:14:19.745000000.txt    1.7272879987785017  count: 14120  percentage: 0.04203982469512195  node count: 625  edge count: 636\n",
      "index_count: 37\n",
      "thr: 1.4999088661364512\n",
      "graph_5_15/2019-05-15 19:14:19.745000000~2019-05-15 19:29:20.800000000.txt    1.6921135548526869  count: 16685  percentage: 0.04476358602335165  node count: 503  edge count: 503\n",
      "index_count: 38\n",
      "thr: 1.522944163388003\n",
      "graph_5_15/2019-05-15 19:29:20.800000000~2019-05-15 19:44:22.319000000.txt    1.731481507383075  count: 13967  percentage: 0.051665334990530304  node count: 410  edge count: 412\n",
      "index_count: 39\n",
      "thr: 1.485350447152477\n",
      "graph_5_15/2019-05-15 19:44:22.319000000~2019-05-15 19:59:22.601000000.txt    1.9241053792837692  count: 5717  percentage: 0.058156331380208336  node count: 447  edge count: 456\n",
      "index_count: 40\n",
      "thr: 1.529125604808793\n",
      "graph_5_15/2019-05-15 19:59:22.601000000~2019-05-15 20:14:24.590000000.txt    1.7438855391000976  count: 21575  percentage: 0.04013206845238095  node count: 906  edge count: 903\n",
      "index_count: 41\n",
      "thr: 1.4964889934680952\n",
      "graph_5_15/2019-05-15 20:14:24.590000000~2019-05-15 20:29:25.523000000.txt    1.8375260558413298  count: 9993  percentage: 0.04066162109375  node count: 359  edge count: 360\n",
      "index_count: 42\n",
      "thr: 1.5286194122052728\n",
      "graph_5_15/2019-05-15 20:29:25.523000000~2019-05-15 20:44:26.676000000.txt    1.8153543569844772  count: 8908  percentage: 0.0448413337628866  node count: 499  edge count: 498\n",
      "index_count: 43\n",
      "thr: 1.4909295230740964\n",
      "graph_5_15/2019-05-15 20:44:26.676000000~2019-05-15 20:59:27.020000000.txt    1.7755991914113933  count: 11207  percentage: 0.041299380896226416  node count: 370  edge count: 369\n",
      "index_count: 44\n",
      "thr: 1.5127787153872978\n",
      "graph_5_15/2019-05-15 20:59:27.020000000~2019-05-15 21:14:28.862000000.txt    1.859326044047343  count: 6753  percentage: 0.0343475341796875  node count: 357  edge count: 359\n",
      "index_count: 45\n",
      "thr: 1.4868796307294314\n",
      "graph_5_15/2019-05-15 21:14:28.862000000~2019-05-15 21:29:30.066000000.txt    1.8269468958821036  count: 11401  percentage: 0.041084092481549817  node count: 722  edge count: 735\n",
      "index_count: 46\n",
      "thr: 1.5110110346884227\n",
      "graph_5_15/2019-05-15 21:29:30.066000000~2019-05-15 21:44:31.381000000.txt    1.6946776846426919  count: 11092  percentage: 0.05612451424870466  node count: 326  edge count: 325\n",
      "index_count: 47\n",
      "thr: 1.4796088346238387\n",
      "graph_5_15/2019-05-15 21:44:31.381000000~2019-05-15 21:59:32.550000000.txt    1.7707544485462756  count: 10256  percentage: 0.04022339357429719  node count: 533  edge count: 536\n",
      "index_count: 48\n",
      "thr: 1.516144278598083\n",
      "graph_5_15/2019-05-15 21:59:32.550000000~2019-05-15 22:14:33.792000000.txt    1.7527989947885019  count: 8896  percentage: 0.04501295336787565  node count: 528  edge count: 529\n",
      "index_count: 49\n",
      "thr: 1.4733468398371954\n",
      "graph_5_15/2019-05-15 22:14:33.792000000~2019-05-15 22:29:35.074000000.txt    1.8314063520895352  count: 6396  percentage: 0.036959134615384616  node count: 446  edge count: 446\n",
      "index_count: 50\n",
      "thr: 1.5926198396632536\n",
      "graph_5_15/2019-05-15 22:29:35.074000000~2019-05-15 22:44:36.431000000.txt    1.9408592182971538  count: 8309  percentage: 0.05409505208333333  node count: 485  edge count: 489\n",
      "index_count: 51\n",
      "thr: 1.4712026310696533\n",
      "graph_5_15/2019-05-15 22:44:36.431000000~2019-05-15 22:59:37.524000000.txt    1.689155609781162  count: 8663  percentage: 0.04208935789800995  node count: 321  edge count: 323\n",
      "index_count: 52\n",
      "thr: 1.508818650519378\n",
      "graph_5_15/2019-05-15 22:59:37.524000000~2019-05-15 23:14:38.671000000.txt    1.714757095064047  count: 13172  percentage: 0.04466417100694445  node count: 417  edge count: 416\n",
      "index_count: 53\n",
      "thr: 1.4780273595521072\n",
      "graph_5_15/2019-05-15 23:14:38.671000000~2019-05-15 23:29:39.881000000.txt    1.8040836752730487  count: 7733  percentage: 0.05208108836206896  node count: 361  edge count: 371\n",
      "index_count: 54\n",
      "thr: 1.536265288188131\n",
      "graph_5_15/2019-05-15 23:29:39.881000000~2019-05-15 23:44:41.019000000.txt    1.8458243271525774  count: 10589  percentage: 0.044003490691489365  node count: 529  edge count: 525\n"
     ]
    }
   ],
   "source": [
    "# Build per-time-window anomaly stats for 5-15 and group related windows\n",
    "# into history queues (windows sharing related nodes land in one queue).\n",
    "# node_IDF=torch.load(\"node_IDF\")\n",
    "y_data_5_15=[]\n",
    "df_list_5_15=[]\n",
    "# node_set_list=[]\n",
    "history_list_5_15=[]\n",
    "tw_que=[]\n",
    "his_tw={}\n",
    "current_tw={}\n",
    "loss_list_5_15=[]\n",
    "\n",
    "file_path_list=[]\n",
    "\n",
    "file_path=\"graph_5_15/\"\n",
    "file_l=os.listdir(\"graph_5_15/\")\n",
    "for i in file_l:\n",
    "    file_path_list.append(file_path+i)\n",
    "\n",
    "index_count=0\n",
    "for f_path in sorted(file_path_list):\n",
    "    edge_loss_list=[]\n",
    "    edge_list=[]\n",
    "    print('index_count:',index_count)\n",
    "    \n",
    "    # NOTE(review): eval() executes arbitrary code from the log file.  The\n",
    "    # lines are repr'd dicts written by our own pipeline, but prefer\n",
    "    # ast.literal_eval if the input is ever untrusted.\n",
    "    with open(f_path) as f:  # 'with' closes the handle (was leaked before)\n",
    "        for line in f:\n",
    "            jdata=eval(line.strip())\n",
    "            edge_loss_list.append(jdata['loss'])\n",
    "            edge_list.append([str(jdata['srcmsg']),str(jdata['dstmsg'])])\n",
    "    df_list_5_15.append(pd.DataFrame(edge_loss_list))\n",
    "    count,loss_avg,node_set,edge_set=cal_anomaly_loss(edge_loss_list,edge_list,\"graph_5_15/\")\n",
    "    current_tw['name']=f_path\n",
    "    current_tw['loss']=loss_avg\n",
    "    current_tw['index']=index_count\n",
    "    current_tw['nodeset']=node_set\n",
    "\n",
    "    # Attach the current window to the first history queue containing a\n",
    "    # related window (node-set relation != 0 and not the same file).\n",
    "    added_que_flag=False\n",
    "    for hq in history_list_5_15:\n",
    "        for his_tw in hq:\n",
    "            if cal_set_rel_bak(current_tw['nodeset'],his_tw['nodeset'],file_list_5_9_12)!=0 and current_tw['name']!=his_tw['name']:\n",
    "                print(\"history queue:\",his_tw['name'])\n",
    "                hq.append(copy.deepcopy(current_tw))\n",
    "                added_que_flag=True\n",
    "                break\n",
    "        # BUG FIX: stop scanning further queues once the window is queued.\n",
    "        # The flag check used to sit inside the inner loop, so the same\n",
    "        # window could be appended to several history queues.\n",
    "        if added_que_flag:\n",
    "            break\n",
    "    if added_que_flag is False:\n",
    "        # No related queue found: start a new queue with this window.\n",
    "        temp_hq=[copy.deepcopy(current_tw)]\n",
    "        history_list_5_15.append(temp_hq)\n",
    "    index_count+=1\n",
    "    loss_list_5_15.append(loss_avg)\n",
    "    print( f_path,\"  \",loss_avg,\" count:\",count,\" percentage:\",count/len(edge_list),\" node count:\",len(node_set),\" edge count:\",len(edge_set))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['graph_5_15/2019-05-15 10:46:03.642000000~2019-05-15 11:06:20.806000000.txt', 'graph_5_15/2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000.txt', 'graph_5_15/2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000.txt', 'graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt', 'graph_5_15/2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000.txt', 'graph_5_15/2019-05-15 16:11:27.687000000~2019-05-15 16:26:39.781000000.txt']\n",
      "873.3464440179548\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# Score each 5-15 history queue by the product of (loss+1) over its\n",
    "# windows; queues whose product exceeds 100 have every member window\n",
    "# flagged as anomalous in pred_label.\n",
    "name_list=[]\n",
    "for hl in history_list_5_15:\n",
    "    loss_count=0\n",
    "    for hq in hl:\n",
    "        if loss_count==0:\n",
    "            # first window seeds the product with (loss+1)\n",
    "            loss_count=(loss_count+1)*(hq['loss']+1)\n",
    "        else:\n",
    "            # later windows multiply into the running product\n",
    "            loss_count=(loss_count)*(hq['loss']+1)\n",
    "#     name_list=[]\n",
    "    if loss_count>100:\n",
    "        name_list=[]\n",
    "        for i in hl:\n",
    "            name_list.append(i['name']) \n",
    "        print(name_list)\n",
    "        for i in name_list:\n",
    "            # pred_label is built in an earlier cell; 1 marks the window anomalous\n",
    "            pred_label[i]=1\n",
    "        print(loss_count)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 5-17"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "index_count: 0\n",
      "thr: 1.2396995421672876\n",
      "graph_5_17/2019-05-17 00:00:00.162000000~2019-05-17 00:17:05.023000000.txt    2.2705906906490316  count: 990  percentage: 0.031186995967741934  node count: 282  edge count: 291\n",
      "index_count: 1\n",
      "thr: 1.1348125571401906\n",
      "graph_5_17/2019-05-17 00:17:05.023000000~2019-05-17 00:32:06.529000000.txt    1.667140552249508  count: 305  percentage: 0.0372314453125  node count: 15  edge count: 18\n",
      "index_count: 2\n",
      "thr: 1.0345511625831574\n",
      "graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt    1.961928488204487  count: 344  percentage: 0.010179924242424242  node count: 61  edge count: 62\n",
      "index_count: 3\n",
      "thr: 1.0156902668151262\n",
      "graph_5_17/2019-05-17 00:47:07.880000000~2019-05-17 01:02:09.047000000.txt    1.4832856792555933  count: 130  percentage: 0.015869140625  node count: 6  edge count: 5\n",
      "index_count: 4\n",
      "thr: 0.981017382587746\n",
      "graph_5_17/2019-05-17 01:02:09.047000000~2019-05-17 01:17:10.410000000.txt    1.4612201134716314  count: 184  percentage: 0.005445075757575758  node count: 21  edge count: 18\n",
      "index_count: 5\n",
      "thr: 1.0555083311394415\n",
      "graph_5_17/2019-05-17 01:17:10.410000000~2019-05-17 01:32:11.778000000.txt    1.3831793064917337  count: 245  percentage: 0.0299072265625  node count: 18  edge count: 14\n",
      "index_count: 6\n",
      "thr: 0.9837670114944248\n",
      "graph_5_17/2019-05-17 01:32:11.778000000~2019-05-17 01:47:12.976000000.txt    1.522691167731204  count: 196  percentage: 0.0059814453125  node count: 15  edge count: 13\n",
      "index_count: 7\n",
      "thr: 0.9969671186691834\n",
      "graph_5_17/2019-05-17 01:47:12.976000000~2019-05-17 02:02:14.158000000.txt    1.2353997311180065  count: 192  percentage: 0.0234375  node count: 6  edge count: 5\n",
      "index_count: 8\n",
      "thr: 0.9803720692280345\n",
      "graph_5_17/2019-05-17 02:02:14.158000000~2019-05-17 02:17:15.482000000.txt    1.4362578692745014  count: 216  percentage: 0.006392045454545455  node count: 19  edge count: 17\n",
      "index_count: 9\n",
      "thr: 1.0535914403518585\n",
      "graph_5_17/2019-05-17 02:17:15.482000000~2019-05-17 02:32:16.882000000.txt    1.4517001010647848  count: 221  percentage: 0.0269775390625  node count: 13  edge count: 12\n",
      "index_count: 10\n",
      "thr: 0.9907612298504431\n",
      "graph_5_17/2019-05-17 02:32:16.882000000~2019-05-17 02:47:18.043000000.txt    1.5319733977248176  count: 220  percentage: 0.0067138671875  node count: 21  edge count: 18\n",
      "index_count: 11\n",
      "thr: 1.0362702593488167\n",
      "graph_5_17/2019-05-17 02:47:18.043000000~2019-05-17 03:02:19.211000000.txt    1.3375017002539105  count: 242  percentage: 0.029541015625  node count: 22  edge count: 20\n",
      "index_count: 12\n",
      "thr: 0.9838471302462898\n",
      "graph_5_17/2019-05-17 03:02:19.211000000~2019-05-17 03:17:20.492000000.txt    1.3508030815324572  count: 290  percentage: 0.008581912878787878  node count: 16  edge count: 16\n",
      "index_count: 13\n",
      "thr: 1.0641620863215153\n",
      "graph_5_17/2019-05-17 03:17:20.492000000~2019-05-17 03:32:21.866000000.txt    1.3675097660127777  count: 267  percentage: 0.0325927734375  node count: 14  edge count: 11\n",
      "index_count: 14\n",
      "thr: 0.9811733350703797\n",
      "graph_5_17/2019-05-17 03:32:21.866000000~2019-05-17 03:47:23.177000000.txt    1.5269065607296186  count: 182  percentage: 0.00555419921875  node count: 12  edge count: 10\n",
      "index_count: 15\n",
      "thr: 1.0096328244415034\n",
      "graph_5_17/2019-05-17 03:47:23.177000000~2019-05-17 04:02:24.594000000.txt    1.3800331195654765  count: 143  percentage: 0.0174560546875  node count: 6  edge count: 5\n",
      "index_count: 16\n",
      "thr: 0.983193851223082\n",
      "graph_5_17/2019-05-17 04:02:24.594000000~2019-05-17 04:17:25.489000000.txt    1.4661504924229456  count: 216  percentage: 0.006591796875  node count: 13  edge count: 11\n",
      "index_count: 17\n",
      "thr: 1.051790287387215\n",
      "graph_5_17/2019-05-17 04:17:25.489000000~2019-05-17 04:32:26.948000000.txt    1.7130591132026205  count: 152  percentage: 0.0185546875  node count: 6  edge count: 5\n",
      "index_count: 18\n",
      "thr: 0.9934257166172931\n",
      "graph_5_17/2019-05-17 04:32:26.948000000~2019-05-17 04:47:28.259000000.txt    1.787113346473684  count: 184  percentage: 0.005445075757575758  node count: 16  edge count: 13\n",
      "index_count: 19\n",
      "thr: 1.0973935330429088\n",
      "graph_5_17/2019-05-17 04:47:28.259000000~2019-05-17 05:02:29.618000000.txt    1.463436902499021  count: 319  percentage: 0.0389404296875  node count: 14  edge count: 12\n",
      "index_count: 20\n",
      "thr: 0.9841414080294593\n",
      "graph_5_17/2019-05-17 05:02:29.618000000~2019-05-17 05:17:30.624000000.txt    1.5017554968948061  count: 211  percentage: 0.006439208984375  node count: 17  edge count: 16\n",
      "index_count: 21\n",
      "thr: 1.015772991799897\n",
      "graph_5_17/2019-05-17 05:17:30.624000000~2019-05-17 05:32:31.961000000.txt    1.3997757434758566  count: 162  percentage: 0.019775390625  node count: 10  edge count: 8\n",
      "index_count: 22\n",
      "thr: 0.9817356739459109\n",
      "graph_5_17/2019-05-17 05:32:31.961000000~2019-05-17 05:47:33.352000000.txt    1.4704599380408168  count: 173  percentage: 0.005279541015625  node count: 18  edge count: 16\n",
      "index_count: 23\n",
      "thr: 1.0687705022094118\n",
      "graph_5_17/2019-05-17 05:47:33.352000000~2019-05-17 06:02:34.706000000.txt    1.763323718487455  count: 124  percentage: 0.01513671875  node count: 10  edge count: 7\n",
      "index_count: 24\n",
      "thr: 1.0018876825442344\n",
      "graph_5_17/2019-05-17 06:02:34.706000000~2019-05-17 06:17:36.113000000.txt    1.9268215175451382  count: 184  percentage: 0.005445075757575758  node count: 17  edge count: 14\n",
      "index_count: 25\n",
      "thr: 1.0682494169545946\n",
      "graph_5_17/2019-05-17 06:17:36.113000000~2019-05-17 06:32:37.454000000.txt    1.7797694110392397  count: 137  percentage: 0.0167236328125  node count: 10  edge count: 8\n",
      "index_count: 26\n",
      "thr: 0.9851170017234624\n",
      "graph_5_17/2019-05-17 06:32:37.454000000~2019-05-17 06:47:38.801000000.txt    1.39575158453001  count: 261  percentage: 0.007965087890625  node count: 18  edge count: 17\n",
      "index_count: 27\n",
      "thr: 1.0539485475970185\n",
      "graph_5_17/2019-05-17 06:47:38.801000000~2019-05-17 07:02:40.131000000.txt    1.5572482667561511  count: 175  percentage: 0.0213623046875  node count: 6  edge count: 5\n",
      "index_count: 28\n",
      "thr: 0.9798056988453278\n",
      "graph_5_17/2019-05-17 07:02:40.131000000~2019-05-17 07:17:41.404000000.txt    1.4418642077616575  count: 195  percentage: 0.005950927734375  node count: 13  edge count: 12\n",
      "index_count: 29\n",
      "thr: 1.1086565574300402\n",
      "graph_5_17/2019-05-17 07:17:41.404000000~2019-05-17 07:32:42.668000000.txt    2.036548176154525  count: 161  percentage: 0.0196533203125  node count: 5  edge count: 4\n",
      "index_count: 30\n",
      "thr: 0.9829054551338114\n",
      "graph_5_17/2019-05-17 07:32:42.668000000~2019-05-17 07:47:44.085000000.txt    1.5385896566418829  count: 201  percentage: 0.005948153409090909  node count: 10  edge count: 9\n",
      "index_count: 31\n",
      "thr: 1.0125633500557223\n",
      "graph_5_17/2019-05-17 07:47:44.085000000~2019-05-17 08:02:45.378000000.txt    1.3288849342774691  count: 196  percentage: 0.02392578125  node count: 8  edge count: 7\n",
      "index_count: 32\n",
      "thr: 0.9800705101221749\n",
      "graph_5_17/2019-05-17 08:02:45.378000000~2019-05-17 08:17:46.281000000.txt    1.4380028432535752  count: 200  percentage: 0.005918560606060606  node count: 15  edge count: 13\n",
      "index_count: 33\n",
      "thr: 1.1146290116273203\n",
      "graph_5_17/2019-05-17 08:17:46.281000000~2019-05-17 08:32:47.467000000.txt    1.575210513217521  count: 310  percentage: 0.037841796875  node count: 10  edge count: 11\n",
      "index_count: 34\n",
      "thr: 1.0175579161179016\n",
      "graph_5_17/2019-05-17 08:32:47.467000000~2019-05-17 08:47:48.296000000.txt    1.7343688873560295  count: 311  percentage: 0.009490966796875  node count: 19  edge count: 19\n",
      "index_count: 35\n",
      "thr: 1.6366418479900693\n",
      "graph_5_17/2019-05-17 08:47:48.296000000~2019-05-17 09:02:49.657000000.txt    2.1535546636533036  count: 2416  percentage: 0.0471875  node count: 149  edge count: 149\n",
      "index_count: 36\n",
      "thr: 1.5059480278498178\n",
      "graph_5_17/2019-05-17 09:02:49.657000000~2019-05-17 09:17:51.260000000.txt    2.011325468869075  count: 5847  percentage: 0.043922776442307694  node count: 518  edge count: 536\n",
      "index_count: 37\n",
      "thr: 1.5259306704617672\n",
      "graph_5_17/2019-05-17 09:17:51.260000000~2019-05-17 09:32:52.628000000.txt    1.679563899100216  count: 16822  percentage: 0.052822297025723476  node count: 467  edge count: 477\n",
      "index_count: 38\n",
      "thr: 1.4843648947235435\n",
      "graph_5_17/2019-05-17 09:32:52.628000000~2019-05-17 09:47:52.897000000.txt    1.6983844430882875  count: 18182  percentage: 0.04450090068922306  node count: 717  edge count: 731\n",
      "index_count: 39\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "thr: 1.536448356409168\n",
      "graph_5_17/2019-05-17 09:47:52.897000000~2019-05-17 10:02:57.226000000.txt    1.7534287099185661  count: 12653  percentage: 0.05191783744747899  node count: 473  edge count: 481\n",
      "index_count: 40\n",
      "thr: 1.5078394245768911\n",
      "graph_5_17/2019-05-17 10:02:57.226000000~2019-05-17 10:17:59.598000000.txt    1.7567328132033668  count: 15830  percentage: 0.05051955678104575  node count: 831  edge count: 860\n",
      "index_count: 41\n",
      "thr: 1.5185919554989606\n",
      "graph_5_17/2019-05-17 10:17:59.598000000~2019-05-17 10:33:01.000000000.txt    1.6581166637192228  count: 22392  percentage: 0.042133309248554913  node count: 644  edge count: 648\n",
      "index_count: 42\n",
      "thr: 1.4496993737458292\n",
      "graph_5_17/2019-05-17 10:33:01.000000000~2019-05-17 10:48:02.004000000.txt    1.6515785491940222  count: 10132  percentage: 0.043781111725663714  node count: 236  edge count: 235\n",
      "index_count: 43\n",
      "thr: 1.5339011066633872\n",
      "graph_5_17/2019-05-17 10:48:02.004000000~2019-05-17 11:03:03.339000000.txt    1.7268913049731953  count: 9008  percentage: 0.04887152777777778  node count: 431  edge count: 435\n",
      "index_count: 44\n",
      "thr: 1.4482174635475333\n",
      "graph_5_17/2019-05-17 11:03:03.339000000~2019-05-17 11:18:04.107000000.txt    1.6672794558244428  count: 11334  percentage: 0.05054045376712329  node count: 216  edge count: 215\n",
      "index_count: 45\n",
      "thr: 1.5217335069497828\n",
      "graph_5_17/2019-05-17 11:18:04.107000000~2019-05-17 11:33:05.441000000.txt    1.704740915201315  count: 10767  percentage: 0.043629246628630707  node count: 286  edge count: 287\n",
      "index_count: 46\n",
      "thr: 1.479893383586448\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt\n",
      "graph_5_17/2019-05-17 11:33:05.441000000~2019-05-17 11:48:07.561000000.txt    1.654575734428404  count: 17774  percentage: 0.04834936455431755  node count: 516  edge count: 526\n",
      "index_count: 47\n",
      "thr: 1.5325470727151145\n",
      "graph_5_17/2019-05-17 11:48:07.561000000~2019-05-17 12:03:08.991000000.txt    1.8083182419647401  count: 7967  percentage: 0.03504627674549549  node count: 533  edge count: 533\n",
      "index_count: 48\n",
      "thr: 1.351444915141395\n",
      "graph_5_17/2019-05-17 12:03:08.991000000~2019-05-17 12:18:10.191000000.txt    1.6205884389335685  count: 4960  percentage: 0.05442415730337079  node count: 340  edge count: 339\n",
      "index_count: 49\n",
      "thr: 1.4977624289447438\n",
      "graph_5_17/2019-05-17 12:18:10.191000000~2019-05-17 12:33:11.434000000.txt    1.7900789630257758  count: 3109  percentage: 0.03748312114197531  node count: 151  edge count: 150\n",
      "index_count: 50\n",
      "thr: 1.4613085530146879\n",
      "graph_5_17/2019-05-17 12:33:11.434000000~2019-05-17 12:48:13.087000000.txt    1.6659277375979764  count: 13670  percentage: 0.05493666409465021  node count: 412  edge count: 423\n",
      "index_count: 51\n",
      "thr: 1.5151495368956138\n",
      "graph_5_17/2019-05-17 12:48:13.087000000~2019-05-17 13:03:13.328000000.txt    1.7628285758546358  count: 3813  percentage: 0.03838796713917526  node count: 138  edge count: 137\n",
      "index_count: 52\n",
      "thr: 1.4673863985813318\n",
      "graph_5_17/2019-05-17 13:03:13.328000000~2019-05-17 13:18:15.486000000.txt    1.6980736248642363  count: 16173  percentage: 0.05161420036764706  node count: 325  edge count: 321\n",
      "index_count: 53\n",
      "thr: 1.5096021668819009\n",
      "graph_5_17/2019-05-17 13:18:15.486000000~2019-05-17 13:33:16.997000000.txt    1.7035516022349453  count: 5388  percentage: 0.04697963169642857  node count: 360  edge count: 362\n",
      "index_count: 54\n",
      "thr: 1.4582848886787732\n",
      "graph_5_17/2019-05-17 13:33:16.997000000~2019-05-17 13:50:01.226000000.txt    1.6845064686675744  count: 13871  percentage: 0.05250348231589147  node count: 516  edge count: 512\n",
      "index_count: 55\n",
      "thr: 1.4937146792770792\n",
      "graph_5_17/2019-05-17 13:50:01.226000000~2019-05-17 14:05:21.553000000.txt    1.722453209827564  count: 13689  percentage: 0.04026555440512048  node count: 399  edge count: 401\n",
      "index_count: 56\n",
      "thr: 1.488337517425892\n",
      "node: {'file': '/data/data/ca.jamdat.flight.bejeweled/filesn.bin'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 13:50:01.226000000~2019-05-17 14:05:21.553000000.txt\n",
      "graph_5_17/2019-05-17 14:05:21.553000000~2019-05-17 14:20:43.541000000.txt    1.655632212174818  count: 12164  percentage: 0.04551305076628352  node count: 296  edge count: 293\n",
      "index_count: 57\n",
      "thr: 1.5298215546907206\n",
      "node: {'file': '/data/data/ca.jamdat.flight.bejeweled/filesn.bin'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 13:50:01.226000000~2019-05-17 14:05:21.553000000.txt\n",
      "graph_5_17/2019-05-17 14:20:43.541000000~2019-05-17 14:35:45.993000000.txt    1.7180258501495795  count: 18878  percentage: 0.04776048413212435  node count: 558  edge count: 557\n",
      "index_count: 58\n",
      "thr: 1.5038008136247294\n",
      "graph_5_17/2019-05-17 14:35:45.993000000~2019-05-17 14:50:52.897000000.txt    1.7166545144415775  count: 12778  percentage: 0.05093271683673469  node count: 566  edge count: 574\n",
      "index_count: 59\n",
      "thr: 1.5721914240286177\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt\n",
      "graph_5_17/2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000.txt    1.9433101986261951  count: 6121  percentage: 0.041510687934027776  node count: 491  edge count: 500\n",
      "index_count: 60\n",
      "thr: 1.4361148489094786\n",
      "node: {'subject': '/system/bin/screencap'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt\n",
      "graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt    2.0532826288756127  count: 2936  percentage: 0.062330163043478264  node count: 487  edge count: 497\n",
      "index_count: 61\n",
      "thr: 1.590724119422262\n",
      "node: {'file': '/config/sdcardfs/de.belu.appstarter'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt\n",
      "graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt    2.1261760384613866  count: 1173  percentage: 0.07159423828125  node count: 249  edge count: 246\n",
      "index_count: 62\n",
      "thr: 1.207546141101738\n",
      "graph_5_17/2019-05-17 15:36:41.964000000~2019-05-17 15:51:43.493000000.txt    2.4219352957524793  count: 553  percentage: 0.023479959239130436  node count: 21  edge count: 19\n",
      "index_count: 63\n",
      "thr: 1.6607331298559962\n",
      "node: {'file': '/config/sdcardfs/de.belu.appstarter'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt\n",
      "graph_5_17/2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000.txt    2.185891072412184  count: 1213  percentage: 0.07897135416666666  node count: 244  edge count: 240\n",
      "index_count: 64\n",
      "thr: 1.3152183199802852\n",
      "graph_5_17/2019-05-17 16:06:44.953000000~2019-05-17 16:21:46.485000000.txt    2.1828270146372533  count: 1142  percentage: 0.044609375  node count: 253  edge count: 250\n",
      "index_count: 65\n",
      "thr: 1.5438617306878446\n",
      "node: {'file': '/config/sdcardfs/de.belu.appstarter'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt\n",
      "graph_5_17/2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000.txt    2.1256183969897138  count: 1045  percentage: 0.06803385416666667  node count: 225  edge count: 222\n",
      "index_count: 66\n",
      "thr: 1.2679245883071466\n",
      "node: {'subject': '/data/data/de.belu.appstarter/busybox'}  IDF: 5.030437921392435\n",
      "history queue: graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt\n",
      "graph_5_17/2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000.txt    2.270210491558881  count: 818  percentage: 0.034731657608695655  node count: 77  edge count: 77\n",
      "index_count: 67\n",
      "thr: 1.4752901199758235\n",
      "graph_5_17/2019-05-17 16:51:49.352000000~2019-05-17 17:06:50.973000000.txt    2.4117964364020392  count: 646  percentage: 0.05735085227272727  node count: 32  edge count: 31\n",
      "index_count: 68\n",
      "thr: 1.304167466218522\n",
      "graph_5_17/2019-05-17 17:06:50.973000000~2019-05-17 17:21:51.831000000.txt    2.7856382810202702  count: 541  percentage: 0.024014559659090908  node count: 26  edge count: 25\n",
      "index_count: 69\n",
      "thr: 1.3789705074082572\n",
      "graph_5_17/2019-05-17 17:21:51.831000000~2019-05-17 17:36:53.217000000.txt    2.293952872826255  count: 523  percentage: 0.046431107954545456  node count: 30  edge count: 28\n",
      "index_count: 70\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "thr: 1.2617493156536603\n",
      "graph_5_17/2019-05-17 17:36:53.217000000~2019-05-17 17:51:54.220000000.txt    2.4691279896931744  count: 649  percentage: 0.02880859375  node count: 31  edge count: 28\n",
      "index_count: 71\n",
      "thr: 1.3948741515516274\n",
      "graph_5_17/2019-05-17 17:51:54.220000000~2019-05-17 18:06:55.854000000.txt    2.329598121522706  count: 574  percentage: 0.050958806818181816  node count: 29  edge count: 26\n",
      "index_count: 72\n",
      "thr: 1.2257049771073185\n",
      "graph_5_17/2019-05-17 18:06:55.854000000~2019-05-17 18:21:57.246000000.txt    2.6283194073036733  count: 537  percentage: 0.0218505859375  node count: 26  edge count: 22\n",
      "index_count: 73\n",
      "thr: 1.430576042023203\n",
      "graph_5_17/2019-05-17 18:21:57.246000000~2019-05-17 18:36:58.618000000.txt    2.453868646388667  count: 563  percentage: 0.049982244318181816  node count: 26  edge count: 25\n",
      "index_count: 74\n",
      "thr: 1.2142718307793983\n",
      "graph_5_17/2019-05-17 18:36:58.618000000~2019-05-17 18:52:00.027000000.txt    2.609119129039968  count: 526  percentage: 0.021402994791666668  node count: 25  edge count: 21\n",
      "index_count: 75\n",
      "thr: 1.5488479876283743\n",
      "graph_5_17/2019-05-17 18:52:00.027000000~2019-05-17 19:07:01.518000000.txt    2.9423228457472232  count: 453  percentage: 0.040216619318181816  node count: 24  edge count: 24\n",
      "index_count: 76\n",
      "thr: 1.253821164755407\n",
      "graph_5_17/2019-05-17 19:07:01.518000000~2019-05-17 19:22:02.614000000.txt    2.441800355531947  count: 635  percentage: 0.0248046875  node count: 37  edge count: 36\n",
      "index_count: 77\n",
      "thr: 2.001899231319177\n",
      "graph_5_17/2019-05-17 19:22:02.614000000~2019-05-17 19:37:03.821000000.txt    3.231957616663255  count: 557  percentage: 0.05439453125  node count: 27  edge count: 27\n",
      "index_count: 78\n",
      "thr: 1.2146597709603184\n",
      "graph_5_17/2019-05-17 19:37:03.821000000~2019-05-17 19:52:04.945000000.txt    2.2225216207294944  count: 744  percentage: 0.0290625  node count: 31  edge count: 33\n",
      "index_count: 79\n",
      "thr: 2.0339529135400336\n",
      "graph_5_17/2019-05-17 19:52:04.945000000~2019-05-17 20:07:06.424000000.txt    3.1284583074762784  count: 648  percentage: 0.06328125  node count: 49  edge count: 50\n",
      "index_count: 80\n",
      "thr: 1.3165595895303976\n",
      "graph_5_17/2019-05-17 20:07:06.424000000~2019-05-17 20:22:07.965000000.txt    2.6124149049012195  count: 694  percentage: 0.025101273148148147  node count: 29  edge count: 29\n",
      "index_count: 81\n",
      "thr: 1.7899658649738903\n",
      "graph_5_17/2019-05-17 20:22:07.965000000~2019-05-17 20:37:09.206000000.txt    2.8906842087917664  count: 708  percentage: 0.069140625  node count: 24  edge count: 24\n",
      "index_count: 82\n",
      "thr: 1.2121268570630819\n",
      "graph_5_17/2019-05-17 20:37:09.206000000~2019-05-17 20:52:10.643000000.txt    2.4713329913251756  count: 637  percentage: 0.023039641203703703  node count: 32  edge count: 34\n",
      "index_count: 83\n",
      "thr: 1.632685544464833\n",
      "graph_5_17/2019-05-17 20:52:10.643000000~2019-05-17 21:07:12.192000000.txt    2.80460218442781  count: 520  percentage: 0.05078125  node count: 27  edge count: 27\n",
      "index_count: 84\n",
      "thr: 1.2840729164234246\n",
      "graph_5_17/2019-05-17 21:07:12.192000000~2019-05-17 21:22:13.697000000.txt    2.75577786449761  count: 624  percentage: 0.022569444444444444  node count: 29  edge count: 27\n",
      "index_count: 85\n",
      "thr: 1.4214440685715055\n",
      "graph_5_17/2019-05-17 21:22:13.697000000~2019-05-17 21:37:14.934000000.txt    2.4821767176818046  count: 507  percentage: 0.04951171875  node count: 23  edge count: 21\n",
      "index_count: 86\n",
      "thr: 1.186705873991679\n",
      "graph_5_17/2019-05-17 21:37:14.934000000~2019-05-17 21:52:16.493000000.txt    2.4735399170991954  count: 574  percentage: 0.02076099537037037  node count: 26  edge count: 23\n",
      "index_count: 87\n",
      "thr: 1.4256199871216322\n",
      "graph_5_17/2019-05-17 21:52:16.493000000~2019-05-17 22:07:18.054000000.txt    2.5280296727204745  count: 491  percentage: 0.04794921875  node count: 21  edge count: 20\n",
      "index_count: 88\n",
      "thr: 1.1953779923211978\n",
      "graph_5_17/2019-05-17 22:07:18.054000000~2019-05-17 22:22:19.501000000.txt    2.5582881726782816  count: 562  percentage: 0.02032696759259259  node count: 22  edge count: 20\n",
      "index_count: 89\n",
      "thr: 1.4762192557856153\n",
      "graph_5_17/2019-05-17 22:22:19.501000000~2019-05-17 22:37:20.816000000.txt    2.693705610494329  count: 484  percentage: 0.047265625  node count: 25  edge count: 23\n",
      "index_count: 90\n",
      "thr: 1.1974937065894822\n",
      "graph_5_17/2019-05-17 22:37:20.816000000~2019-05-17 22:52:22.392000000.txt    2.5810960290641334  count: 560  percentage: 0.02025462962962963  node count: 23  edge count: 20\n",
      "index_count: 91\n",
      "thr: 1.4549232913504793\n",
      "graph_5_17/2019-05-17 22:52:22.392000000~2019-05-17 23:07:23.887000000.txt    2.604610457981731  count: 505  percentage: 0.04931640625  node count: 22  edge count: 20\n",
      "index_count: 92\n",
      "thr: 1.1676575801088163\n",
      "graph_5_17/2019-05-17 23:07:23.887000000~2019-05-17 23:22:25.304000000.txt    2.337142134812928  count: 583  percentage: 0.021086516203703703  node count: 23  edge count: 21\n",
      "index_count: 93\n",
      "thr: 1.4247330554370823\n",
      "graph_5_17/2019-05-17 23:22:25.304000000~2019-05-17 23:37:26.760000000.txt    2.567581575800418  count: 472  percentage: 0.04609375  node count: 19  edge count: 18\n",
      "index_count: 94\n",
      "thr: 1.1704462972502838\n",
      "graph_5_17/2019-05-17 23:37:26.760000000~2019-05-17 23:52:28.261000000.txt    2.400144157698252  count: 570  percentage: 0.020616319444444444  node count: 27  edge count: 24\n"
     ]
    }
   ],
   "source": [
    "# Build per-time-window anomaly stats for 5-17 and group related windows\n",
    "# into history queues (windows sharing related nodes land in one queue).\n",
    "# node_IDF=torch.load(\"node_IDF\")\n",
    "y_data_5_17=[]\n",
    "df_list_5_17=[]\n",
    "# node_set_list=[]\n",
    "history_list_5_17=[]\n",
    "tw_que=[]\n",
    "his_tw={}\n",
    "current_tw={}\n",
    "\n",
    "loss_list_5_17=[]\n",
    "\n",
    "file_path_list=[]\n",
    "\n",
    "file_path=\"graph_5_17/\"\n",
    "file_l=os.listdir(\"graph_5_17/\")\n",
    "for i in file_l:\n",
    "    file_path_list.append(file_path+i)\n",
    "\n",
    "index_count=0\n",
    "for f_path in sorted(file_path_list):\n",
    "    edge_loss_list=[]\n",
    "    edge_list=[]\n",
    "    print('index_count:',index_count)\n",
    "    \n",
    "    # NOTE(review): eval() executes arbitrary code from the log file.  The\n",
    "    # lines are repr'd dicts written by our own pipeline, but prefer\n",
    "    # ast.literal_eval if the input is ever untrusted.\n",
    "    with open(f_path) as f:  # 'with' closes the handle (was leaked before)\n",
    "        for line in f:\n",
    "            jdata=eval(line.strip())\n",
    "            edge_loss_list.append(jdata['loss'])\n",
    "            edge_list.append([str(jdata['srcmsg']),str(jdata['dstmsg'])])\n",
    "    df_list_5_17.append(pd.DataFrame(edge_loss_list))\n",
    "    count,loss_avg,node_set,edge_set=cal_anomaly_loss(edge_loss_list,edge_list,\"graph_5_17/\")\n",
    "    current_tw['name']=f_path\n",
    "    current_tw['loss']=loss_avg\n",
    "    current_tw['index']=index_count\n",
    "    current_tw['nodeset']=node_set\n",
    "\n",
    "    # Attach the current window to the first history queue containing a\n",
    "    # related window (node-set relation != 0 and not the same file).\n",
    "    added_que_flag=False\n",
    "    for hq in history_list_5_17:\n",
    "        for his_tw in hq:\n",
    "            if cal_set_rel_bak(current_tw['nodeset'],his_tw['nodeset'],file_list_5_9_12)!=0 and current_tw['name']!=his_tw['name']:\n",
    "                print(\"history queue:\",his_tw['name'])\n",
    "                hq.append(copy.deepcopy(current_tw))\n",
    "                added_que_flag=True\n",
    "                break\n",
    "        # BUG FIX: stop scanning further queues once the window is queued.\n",
    "        # The flag check used to sit inside the inner loop, so the same\n",
    "        # window could be appended to several history queues.\n",
    "        if added_que_flag:\n",
    "            break\n",
    "    if added_que_flag is False:\n",
    "        # No related queue found: start a new queue with this window.\n",
    "        temp_hq=[copy.deepcopy(current_tw)]\n",
    "        history_list_5_17.append(temp_hq)\n",
    "    index_count+=1\n",
    "    loss_list_5_17.append(loss_avg)\n",
    "    print( f_path,\"  \",loss_avg,\" count:\",count,\" percentage:\",count/len(edge_list),\" node count:\",len(node_set),\" edge count:\",len(edge_set))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['graph_5_17/2019-05-17 00:32:06.529000000~2019-05-17 00:47:07.880000000.txt', 'graph_5_17/2019-05-17 11:33:05.441000000~2019-05-17 11:48:07.561000000.txt', 'graph_5_17/2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000.txt', 'graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt', 'graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt', 'graph_5_17/2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000.txt', 'graph_5_17/2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000.txt', 'graph_5_17/2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000.txt']\n",
      "7193.30948885869\n"
     ]
    }
   ],
   "source": [
    "# Flag every history queue whose compounded loss score exceeds the\n",
    "# detection threshold, and mark all of its time windows as anomalous.\n",
    "name_list=[]\n",
    "for hl in history_list_5_17:\n",
    "    # Multiply (loss + 1) across the queue; the first factor seeds the\n",
    "    # product from 1 so a zero accumulator restarts cleanly.\n",
    "    loss_count=0\n",
    "    for hq in hl:\n",
    "        if loss_count==0:\n",
    "            loss_count=(loss_count+1)*(hq['loss']+1)\n",
    "        else:\n",
    "            loss_count=loss_count*(hq['loss']+1)\n",
    "    if loss_count>100:\n",
    "        name_list=[tw['name'] for tw in hl]\n",
    "        print(name_list)\n",
    "        for name in name_list:\n",
    "            pred_label[name]=1\n",
    "        print(loss_count)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.metrics import average_precision_score, roc_auc_score\n",
    "\n",
    "# from sklearn.metrics import plot_roc_curve,roc_curve,auc,roc_auc_score\n",
    "import torch\n",
    "from sklearn import preprocessing\n",
    "import matplotlib.pyplot as plt\n",
    "import numpy as np\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.metrics import confusion_matrix\n",
    "\n",
    "def plot_thr():\n",
    "    \"\"\"Sweep the anomaly threshold from -5 to 5 and plot precision,\n",
    "    recall, F-score, accuracy and AUC at each step, then report the\n",
    "    metrics at the threshold with the best F-score.\n",
    "\n",
    "    Relies on the globals ``y_test_scores`` (anomaly scores) and\n",
    "    ``y_test`` (ground-truth labels).\n",
    "    \"\"\"\n",
    "    np.seterr(invalid='ignore')\n",
    "    step = 0.01\n",
    "    thr_list = torch.arange(-5, 5, step)\n",
    "\n",
    "    precision_list = []\n",
    "    recall_list = []\n",
    "    fscore_list = []\n",
    "    accuracy_list = []\n",
    "    auc_val_list = []\n",
    "    for threshold in thr_list:\n",
    "        # Binarize the scores at the current threshold.\n",
    "        y_prediction = [1 if score > threshold else 0 for score in y_test_scores]\n",
    "        precision, recall, fscore, accuracy, auc_val = classifier_evaluation(y_test, y_prediction)\n",
    "        precision_list.append(float(precision))\n",
    "        recall_list.append(float(recall))\n",
    "        fscore_list.append(float(fscore))\n",
    "        accuracy_list.append(float(accuracy))\n",
    "        auc_val_list.append(float(auc_val))\n",
    "\n",
    "    # Report the metrics at the threshold that maximizes the F-score.\n",
    "    best_index = fscore_list.index(max(fscore_list))\n",
    "    print(best_index)\n",
    "    print(\"max threshold:\", thr_list[best_index])\n",
    "    print('precision:', precision_list[best_index])\n",
    "    print('recall:', recall_list[best_index])\n",
    "    print('fscore:', fscore_list[best_index])\n",
    "    print('accuracy:', accuracy_list[best_index])\n",
    "    print('auc:', auc_val_list[best_index])\n",
    "\n",
    "    plt.plot(thr_list, precision_list, color='red', label='precision', linewidth=2.0, linestyle='-')\n",
    "    plt.plot(thr_list, recall_list, color='orange', label='recall', linewidth=2.0, linestyle='solid')\n",
    "    plt.plot(thr_list, fscore_list, color='y', label='F-score', linewidth=2.0, linestyle='dashed')\n",
    "    plt.plot(thr_list, accuracy_list, color='g', label='accuracy', linewidth=2.0, linestyle='dashdot')\n",
    "    plt.plot(thr_list, auc_val_list, color='b', label='auc_val', linewidth=2.0, linestyle='dotted')\n",
    "\n",
    "    plt.xlabel(\"Threshold\", fontdict={'size': 16})\n",
    "    plt.ylabel(\"Rate\", fontdict={'size': 16})\n",
    "    plt.title(\"Different evaluation Indicators by varying threshold value\", fontdict={'size': 12})\n",
    "    plt.legend(loc='best', fontsize=12, markerscale=0.5)\n",
    "    plt.show()\n",
    "\n",
    "def classifier_evaluation(y_test, y_test_pred):\n",
    "    \"\"\"Print and return (precision, recall, fscore, accuracy, auc_val)\n",
    "    for ground-truth labels ``y_test`` vs. predictions ``y_test_pred``.\n",
    "    \"\"\"\n",
    "    # Confusion-matrix cells: true/false negatives and positives.\n",
    "    tn, fp, fn, tp = confusion_matrix(y_test, y_test_pred).ravel()\n",
    "    print('tn:', tn)\n",
    "    print('fp:', fp)\n",
    "    print('fn:', fn)\n",
    "    print('tp:', tp)\n",
    "    precision = tp / (tp + fp)\n",
    "    recall = tp / (tp + fn)\n",
    "    accuracy = (tp + tn) / (tp + tn + fp + fn)\n",
    "    fscore = 2 * (precision * recall) / (precision + recall)\n",
    "    auc_val = roc_auc_score(y_test, y_test_pred)\n",
    "    print(\"precision:\", precision)\n",
    "    print(\"recall:\", recall)\n",
    "    print(\"fscore:\", fscore)\n",
    "    print(\"accuracy:\", accuracy)\n",
    "    print(\"auc_val:\", auc_val)\n",
    "    return precision, recall, fscore, accuracy, auc_val\n",
    "\n",
    "def minmax(data):\n",
    "    min_val=min(data)\n",
    "    max_val=max(data)\n",
    "    ans=[]\n",
    "    for i in data:\n",
    "        ans.append((i-min_val)/(max_val-min_val))\n",
    "    return ans\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Align ground-truth labels and window-level predictions into two\n",
    "# parallel lists for evaluation.\n",
    "y=[labels[name] for name in labels]\n",
    "y_pred=[pred_label[name] for name in labels]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tn: 218\n",
      "fp: 4\n",
      "fn: 0\n",
      "tp: 10\n",
      "precision: 0.7142857142857143\n",
      "recall: 1.0\n",
      "fscore: 0.8333333333333333\n",
      "accuracy: 0.9827586206896551\n",
      "auc_val: 0.990990990990991\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "(0.7142857142857143,\n",
       " 1.0,\n",
       " 0.8333333333333333,\n",
       " 0.9827586206896551,\n",
       " 0.990990990990991)"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Final window-level detection metrics (ground truth vs. predictions).\n",
    "classifier_evaluation(y,y_pred)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Count the number of attack edges"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [],
   "source": [
    "def keyword_hit(line):\n",
    "    attack_nodes=[\n",
    "            'barephone-instr.apk',\n",
    "        'screencap-instr.apk',\n",
    "           'de.belu.appstarter',\n",
    "        './run_webserver.sh',\n",
    "        'appstarter-instr.apk',\n",
    "        'screenshot.png',\n",
    "        'screenshot',\n",
    "        '/dev/msm_g711tlaw',\n",
    "        'com.android.providers.contacts',\n",
    "        'barephone',\n",
    "        'busybox',\n",
    "\n",
    "        \n",
    "        '/data/local/tmp',\n",
    "        'calllog.db',\n",
    "        'calendar.db',        \n",
    "        'external.db',\n",
    "        'internal.db',\n",
    "        'lastAccess.db',\n",
    "        'mmssms.db',\n",
    "        \n",
    "\n",
    "#         '77.138.117.150',      \n",
    "#         '128.55.12.33',\n",
    "#         '128.55.12.233',\n",
    "#         '128.55.12.166',\n",
    "#         '49.8.46.240',\n",
    "#         '42.183.7.162',\n",
    "#         '133.39.25.45', \n",
    "        ]\n",
    "\n",
    "    flag=False\n",
    "    for i in attack_nodes:\n",
    "        if i in line:\n",
    "            flag=True\n",
    "            break\n",
    "    return flag\n",
    "\n",
    "\n",
    "\n",
    "# Collect every time-window graph file from both test days.\n",
    "files=[]\n",
    "for day_dir in (\"graph_5_15\", \"graph_5_17\"):\n",
    "    for fname in os.listdir(day_dir):\n",
    "        files.append(day_dir+\"/\"+fname)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|████████████████████████████████████████████████████████████████████████████████████████| 150/150 [00:17<00:00,  8.45it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "4044\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "# Count how many edges across the test-day graphs touch an attack node.\n",
    "attack_edge_count=0\n",
    "for fpath in tqdm(files):\n",
    "    # Bug fix: context manager closes each file; the original opened 150\n",
    "    # files without ever closing them.\n",
    "    with open(fpath) as f:\n",
    "        for line in f:\n",
    "            if keyword_hit(line):\n",
    "                attack_edge_count+=1\n",
    "print(attack_edge_count)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Visualization"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 10%|█████████                                                                                  | 1/10 [00:01<00:09,  1.10s/it]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.659518992846858\n",
      "0.5674487554266885\n",
      "thr: 1.5106921259868908\n",
      "0.5534007007233754\n",
      "0.4498819074684684\n",
      "thr: 1.228223561926078\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 30%|███████████████████████████▎                                                               | 3/10 [00:02<00:06,  1.16it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.6576207372757144\n",
      "0.5560456364225123\n",
      "thr: 1.4916891919094828\n",
      "0.5148039244530523\n",
      "0.38085547794764996\n",
      "thr: 1.0860871413745272\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 50%|█████████████████████████████████████████████▌                                             | 5/10 [00:04<00:04,  1.15it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.6959515345520622\n",
      "0.5841599263177036\n",
      "thr: 1.5721914240286177\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 70%|███████████████████████████████████████████████████████████████▋                           | 7/10 [00:04<00:01,  1.83it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.6246477632806717\n",
      "0.5409780570858713\n",
      "thr: 1.4361148489094786\n",
      "0.7208051666068069\n",
      "0.5799459685436368\n",
      "thr: 1.590724119422262\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      " 90%|█████████████████████████████████████████████████████████████████████████████████▉         | 9/10 [00:04<00:00,  2.98it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.7665564077627399\n",
      "0.5961178147288376\n",
      "thr: 1.6607331298559962\n",
      "0.7082760480609182\n",
      "0.5570571217512844\n",
      "thr: 1.5438617306878446\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "100%|██████████████████████████████████████████████████████████████████████████████████████████| 10/10 [00:05<00:00,  1.93it/s]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.5543714374887345\n",
      "0.4757021005456081\n",
      "thr: 1.2679245883071466\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "from graphviz import Digraph\n",
    "import networkx as nx\n",
    "import datetime\n",
    "import community.community_louvain as community_louvain\n",
    "from tqdm import tqdm\n",
    "\n",
    "\n",
    "\n",
    "# Some common path abstraction for visualization\n",
    "# Maps a concrete path prefix/substring to its wildcard form so noisy\n",
    "# per-instance paths collapse into a single graph node.\n",
    "# NOTE: insertion order matters — replace_path_name() returns the FIRST\n",
    "# matching pattern.\n",
    "replace_dic={\n",
    "    '/run/shm/':'/run/shm/*',\n",
    "#     '/home/admin/.cache/mozilla/firefox/pe11scpa.default/cache2/entries/':'/home/admin/.cache/mozilla/firefox/pe11scpa.default/cache2/entries/*',\n",
    "   '/home/admin/.cache/mozilla/firefox/':'/home/admin/.cache/mozilla/firefox/*',\n",
    "    '/home/admin/.mozilla/firefox':'/home/admin/.mozilla/firefox*',    \n",
    "    '/data/replay_logdb/':'/data/replay_logdb/*', \n",
    "    '/home/admin/.local/share/applications/':'/home/admin/.local/share/applications/*', \n",
    "    \n",
    "    '/usr/share/applications/':'/usr/share/applications/*', \n",
    "    '/lib/x86_64-linux-gnu/':'/lib/x86_64-linux-gnu/*',     \n",
    "    '/proc/':'/proc/*', \n",
    "     '/stat':'*/stat', \n",
    "    '/etc/bash_completion.d/':'/etc/bash_completion.d/*', \n",
    "    '/usr/bin/python2.7':'/usr/bin/python2.7/*', \n",
    "     '/usr/lib/python2.7':'/usr/lib/python2.7/*', \n",
    "'/data/data/org.mozilla.fennec_firefox_dev/cache/':'/data/data/org.mozilla.fennec_firefox_dev/cache/*',\n",
    "    'UNNAMED':'UNNAMED *',\n",
    "}\n",
    "\n",
    "\n",
    "def replace_path_name(path_name):\n",
    "    \"\"\"Collapse ``path_name`` to its wildcard form if it contains any\n",
    "    pattern from ``replace_dic``; otherwise return it unchanged.\"\"\"\n",
    "    for pattern, abstracted in replace_dic.items():\n",
    "        if pattern in path_name:\n",
    "            return abstracted\n",
    "    return path_name\n",
    "\n",
    "\n",
    "# Users should manually put the detected anomalous time windows here\n",
    "# (i.e. the window files flagged anomalous by the queue-based detector).\n",
    "attack_list = [\n",
    "    'graph_5_15/2019-05-15 14:07:59.753000000~2019-05-15 14:23:00.265000000.txt',\n",
    "    'graph_5_15/2019-05-15 14:23:00.265000000~2019-05-15 14:38:02.135000000.txt',\n",
    "    'graph_5_15/2019-05-15 15:38:59.175000000~2019-05-15 15:55:38.955000000.txt', \n",
    "    'graph_5_15/2019-05-15 15:55:38.955000000~2019-05-15 16:11:27.687000000.txt',    \n",
    "\n",
    "    'graph_5_17/2019-05-17 14:50:52.897000000~2019-05-17 15:06:00.371000000.txt', \n",
    "    'graph_5_17/2019-05-17 15:06:00.371000000~2019-05-17 15:21:40.474000000.txt', \n",
    "    'graph_5_17/2019-05-17 15:21:40.474000000~2019-05-17 15:36:41.964000000.txt', \n",
    "    'graph_5_17/2019-05-17 15:51:43.493000000~2019-05-17 16:06:44.953000000.txt', \n",
    "    'graph_5_17/2019-05-17 16:21:46.485000000~2019-05-17 16:36:47.843000000.txt', \n",
    "    'graph_5_17/2019-05-17 16:36:47.843000000~2019-05-17 16:51:49.352000000.txt'\n",
    "]\n",
    "\n",
    "# Build one pruned graph per anomalous time window: keep only edges\n",
    "# whose reconstruction loss exceeds mean + 1.5 * std for that window,\n",
    "# and merge them all into the global graph gg.\n",
    "original_edges_count = 0\n",
    "graphs = []\n",
    "gg = nx.DiGraph()\n",
    "count = 0\n",
    "for path in tqdm(attack_list):\n",
    "    if \".txt\" in path:\n",
    "        line_count = 0\n",
    "        node_set = set()\n",
    "        tempg = nx.DiGraph()\n",
    "        edge_list = []\n",
    "        # Bug fix: context manager closes each window file (the original\n",
    "        # leaked one file handle per window).\n",
    "        with open(path, \"r\") as f:\n",
    "            for line in f:\n",
    "                count += 1\n",
    "                # NOTE(review): eval() is unsafe on untrusted data; the\n",
    "                # log lines are produced by this pipeline itself.\n",
    "                jdata = eval(line.strip())\n",
    "                edge_list.append(jdata)\n",
    "\n",
    "        # Rank edges by loss and derive the per-window pruning threshold.\n",
    "        edge_list = sorted(edge_list, key=lambda x: x['loss'], reverse=True)\n",
    "        original_edges_count += len(edge_list)\n",
    "\n",
    "        loss_list = [e['loss'] for e in edge_list]\n",
    "        loss_mean = mean(loss_list)\n",
    "        loss_std = std(loss_list)\n",
    "        print(loss_mean)\n",
    "        print(loss_std)\n",
    "        thr = loss_mean + 1.5 * loss_std\n",
    "        print(\"thr:\", thr)\n",
    "        # Keep only high-loss (suspicious) edges; node ids are hashes of\n",
    "        # the abstracted node messages.\n",
    "        for e in edge_list:\n",
    "            if e['loss'] > thr:\n",
    "                tempg.add_edge(str(hashgen(replace_path_name(e['srcmsg']))),\n",
    "                               str(hashgen(replace_path_name(e['dstmsg']))))\n",
    "                gg.add_edge(str(hashgen(replace_path_name(e['srcmsg']))), str(hashgen(replace_path_name(e['dstmsg']))),\n",
    "                            loss=e['loss'], srcmsg=e['srcmsg'], dstmsg=e['dstmsg'], edge_type=e['edge_type'],\n",
    "                            time=e['time'])\n",
    "\n",
    "\n",
    "# Louvain community detection over the merged suspicious graph.\n",
    "partition = community_louvain.best_partition(gg.to_undirected())\n",
    "\n",
    "# Generate the candidate subgraphs based on community discovery results:\n",
    "# every Louvain community becomes its own directed subgraph.\n",
    "communities = {}\n",
    "max_partition = max(partition.values(), default=0)\n",
    "for community_id in range(max_partition + 1):\n",
    "    communities[community_id] = nx.DiGraph()\n",
    "# Each edge is added to the community of BOTH of its endpoints.\n",
    "for src, dst in gg.edges:\n",
    "    communities[partition[src]].add_edge(src, dst)\n",
    "    communities[partition[dst]].add_edge(src, dst)\n",
    "\n",
    "\n",
    "# Define the attack nodes. They are **only be used to plot the colors of attack nodes and edges**.\n",
    "# They won't change the detection results.\n",
    "def attack_edge_flag(msg):\n",
    "    attack_edge_type=[\n",
    "    'barephone-instr.apk',\n",
    "        'screencap-instr.apk',\n",
    "           'de.belu.appstarter',\n",
    "        './run_webserver.sh',\n",
    "        'appstarter-instr.apk',\n",
    "        'screenshot.png',\n",
    "        'screenshot',\n",
    "        '/dev/msm_g711tlaw',\n",
    "        'com.android.providers.contacts',\n",
    "        'barephone',\n",
    "        'busybox',\n",
    "        'screencap',\n",
    " \n",
    "        '/data/local/tmp',\n",
    "        'calllog.db',\n",
    "        'calendar.db',        \n",
    "        'external.db',\n",
    "        'internal.db',\n",
    "        'lastAccess.db',\n",
    "        'mmssms.db',\n",
    "    ]\n",
    "    flag=False\n",
    "    for i in attack_edge_type:\n",
    "        if i in msg:\n",
    "            flag=True\n",
    "    return flag\n",
    "\n",
    "\n",
    "# Plot and render each candidate subgraph with graphviz.\n",
    "\n",
    "def _msg_shape(msg):\n",
    "    \"\"\"Graphviz node shape encoding the entity type embedded in ``msg``.\"\"\"\n",
    "    if \"'subject': '\" in msg:\n",
    "        return 'box'\n",
    "    if \"'file': '\" in msg:\n",
    "        return 'oval'\n",
    "    if \"'netflow': '\" in msg:\n",
    "        return 'diamond'\n",
    "    # Bug fix: the original left the shape variable unbound (NameError /\n",
    "    # stale value) for unrecognized messages; fall back to 'oval'.\n",
    "    return 'oval'\n",
    "\n",
    "\n",
    "def _node_color(msg):\n",
    "    \"\"\"Red for attack entities, blue for benign ones.\"\"\"\n",
    "    return 'red' if attack_edge_flag(msg) else 'blue'\n",
    "\n",
    "\n",
    "os.system(\"mkdir -p ./graph_visual/\")\n",
    "graph_index = 0\n",
    "for c in communities:\n",
    "    dot = Digraph(name=\"MyPicture\", comment=\"the test\", format=\"pdf\")\n",
    "    dot.graph_attr['rankdir'] = 'LR'\n",
    "\n",
    "    for e in communities[c].edges:\n",
    "        # Every community edge came from gg, so this lookup cannot fail.\n",
    "        # (The original wrapped it in a bare try/except whose body also\n",
    "        # indexed the edge tuple with string keys — dead code that always\n",
    "        # raised TypeError and was silently swallowed.)\n",
    "        temp_edge = gg.edges[e]\n",
    "\n",
    "        src_id = str(hashgen(replace_path_name(temp_edge['srcmsg'])))\n",
    "        dst_id = str(hashgen(replace_path_name(temp_edge['dstmsg'])))\n",
    "\n",
    "        # Node label = abstracted message + its community id.\n",
    "        dot.node(name=src_id,\n",
    "                 label=str(replace_path_name(temp_edge['srcmsg']) + str(partition[src_id])),\n",
    "                 color=_node_color(temp_edge['srcmsg']),\n",
    "                 shape=_msg_shape(temp_edge['srcmsg']))\n",
    "        dot.node(name=dst_id,\n",
    "                 label=str(replace_path_name(temp_edge['dstmsg']) + str(partition[dst_id])),\n",
    "                 color=_node_color(temp_edge['dstmsg']),\n",
    "                 shape=_msg_shape(temp_edge['dstmsg']))\n",
    "\n",
    "        # An edge is drawn red only when BOTH endpoints are attack entities.\n",
    "        if attack_edge_flag(temp_edge['srcmsg']) and attack_edge_flag(temp_edge['dstmsg']):\n",
    "            edge_color = 'red'\n",
    "        else:\n",
    "            edge_color = 'blue'\n",
    "        dot.edge(src_id, dst_id, label=temp_edge['edge_type'], color=edge_color)\n",
    "\n",
    "    dot.render('./graph_visual/subgraph_' + str(graph_index), view=False)\n",
    "    graph_index += 1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.16"
  },
  "latex_envs": {
   "LaTeX_envs_menu_present": true,
   "autoclose": false,
   "autocomplete": true,
   "bibliofile": "biblio.bib",
   "cite_by": "apalike",
   "current_citInitial": 1,
   "eqLabelWithNumbers": true,
   "eqNumInitial": 1,
   "hotkeys": {
    "equation": "Ctrl-E",
    "itemize": "Ctrl-I"
   },
   "labels_anchors": false,
   "latex_user_defs": false,
   "report_style_numbering": false,
   "user_envs_cfg": false
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {
    "height": "calc(100% - 180px)",
    "left": "10px",
    "top": "150px",
    "width": "225.797px"
   },
   "toc_section_display": true,
   "toc_window_display": true
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
