{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "nodemapping ==>  {0: 'f2', 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0, 10: 1, 11: 'f3', 12: 2, 13: 1, 14: 1, 15: 1, 16: 'f3', 17: 1, 18: 2, 19: 2, 20: 1, 21: 'f3', 22: 1, 23: 2, 24: 2, 25: 2, 26: 2, 27: 1, 28: 1, 29: 1, 30: 1, 31: 1, 32: 'f3', 33: 1, 34: 2, 35: 2, 36: 2, 37: 2, 38: 2, 39: 2, 40: 2, 41: 2, 42: 2, 43: 2, 44: 2, 45: 2, 46: 2, 47: 2, 48: 1, 49: 1, 50: 1, 51: 1, 52: 1, 53: 'f0', 54: 2, 55: 1, 56: 1, 57: 2, 58: 1, 59: 2, 60: 2, 61: 2, 62: 2, 63: 2, 64: 2, 65: 1} \n",
      "\n",
      "edges ===> [(0, 1, 1.7), (0, 2, 1.4), (0, 3, 1.6), (0, 4, 1.3), (0, 5, 1.5), (0, 6, 1.1), (0, 7, 1.2), (0, 8, 1.0), (0, 9, 1.9), (0, 10, 4.7), (0, 11, 4.5), (11, 12, 1.7), (11, 13, 1.5), (11, 14, 1.6), (11, 15, 1.3), (0, 16, 4.9), (16, 17, 1.5), (16, 18, 2.0), (16, 19, 1.8), (0, 20, 4.0), (0, 21, 5.0), (21, 22, 1.7), (21, 23, 2.0), (21, 24, 1.9), (21, 25, 1.5), (0, 26, 6.0), (0, 27, 3.5), (0, 28, 3.0), (0, 29, 4.6), (0, 30, 4.4), (0, 31, 4.1), (0, 32, 5.1), (32, 33, 1.6), (32, 34, 2.0), (32, 35, 2.4), (32, 36, 1.9), (32, 37, 1.5), (32, 38, 2.3), (32, 39, 1.8), (0, 40, 5.9), (0, 41, 5.6), (0, 42, 5.5), (0, 43, 5.4), (0, 44, 6.6), (0, 45, 6.1), (0, 46, 6.9), (0, 47, 6.4), (0, 48, 3.6), (0, 49, 3.3), (0, 50, 3.8), (0, 51, 3.7), (0, 52, 4.2), (0, 53, 4.8), (53, 54, 6.0), (53, 55, 5.9), (53, 56, 6.8), (53, 57, 6.2), (0, 58, 4.3), (0, 59, 5.8), (0, 60, 5.3), (0, 61, 5.7), (0, 62, 5.2), (0, 63, 6.3), (0, 64, 6.7), (0, 65, 3.9)]\n",
      "\n",
      "original_data: {'f0': 5.9, 'f1': 3.0, 'f2': 5.1, 'f3': 1.8, 'label': 2}\n",
      "original_path: [0, 32, 39]  predict_label: 2\n",
      "attack_path: [0, 32, 33]  attack_label: 1\n"
     ]
    }
   ],
   "source": [
    "import math\n",
    "import copy\n",
    "\n",
    "# Get iris\n",
    "def get_iris():\n",
    "    \"\"\"Load the iris dataset as a list of per-sample dicts.\n",
    "\n",
    "    Returns a list of dicts with keys 'f0'..'f3' (the four numeric\n",
    "    features) and 'label' (the class, 0-2).\n",
    "    \"\"\"\n",
    "    from sklearn import datasets\n",
    "    iris = datasets.load_iris()\n",
    "    X = iris.data\n",
    "    y = iris.target\n",
    "\n",
    "    data_iris = []\n",
    "    for features, target in zip(X, y):\n",
    "        # Use `row`, not `dict`: the original name shadowed the builtin.\n",
    "        row = {'f0': features[0], 'f1': features[1],\n",
    "               'f2': features[2], 'f3': features[3],\n",
    "               'label': target}\n",
    "        data_iris.append(row)\n",
    "    return data_iris\n",
    "    \n",
    "# Materialize the dataset and name the dict key that holds the class.\n",
    "data = get_iris()\n",
    "label = 'label'\n",
    "\n",
    "\n",
    "def entropy(data, label):\n",
    "    cl = {}\n",
    "    for x in data:\n",
    "        if x[label] in cl:\n",
    "            cl[x[label]] += 1\n",
    "        else:\n",
    "            cl[x[label]] = 1\n",
    "    tot_cnt = sum(cl.values())\n",
    "    return sum([ -1 * (float(cl[x])/tot_cnt) * math.log2(float(cl[x])/tot_cnt) for x in cl])\n",
    "\n",
    "\n",
    "def findInformationGain(data, label, column, entropyParent):\n",
    "    \"\"\"Information gain from splitting `data` on `column`, given the\n",
    "    parent node's entropy `entropyParent`.\"\"\"\n",
    "    values = {row[column] for row in data}\n",
    "    weightedEntropy = 0\n",
    "    total = 0\n",
    "    for value in values:\n",
    "        subset = [row for row in data if row[column] == value]\n",
    "        weightedEntropy += entropy(subset, label) * len(subset)\n",
    "        total += len(subset)\n",
    "\n",
    "    # Gain = parent entropy minus the size-weighted child entropy.\n",
    "    return entropyParent - weightedEntropy / total\n",
    "\n",
    "# Global tree state: `node` is the next free node id, `nodeMapping` maps\n",
    "# node id -> split column (internal node) or class label (leaf), and\n",
    "# `edges` holds (parent_id, child_id, branch_value) triples.\n",
    "node = 0\n",
    "nodeMapping = {}\n",
    "edges = []\n",
    "\n",
    "def makeDecisionTree(data, label, parent=-1, branch=''):\n",
    "    \"\"\"Recursively build an ID3-style decision tree into the globals above.\n",
    "\n",
    "    Each branch is an exact feature value, so continuous features are\n",
    "    treated as categorical. NOTE: rows of `data` are mutated - the chosen\n",
    "    split column is deleted from every row before recursing.\n",
    "    \"\"\"\n",
    "\n",
    "    global node, nodeMapping\n",
    "    if parent >= 0:\n",
    "        edges.append((parent, node, branch))\n",
    "\n",
    "    # Find the variable (column) with maximum information gain.\n",
    "    infoGain = []\n",
    "    columns = [x for x in data[0]]\n",
    "    for column in columns:\n",
    "        if not(column == label):\n",
    "            ent = entropy(data, label)\n",
    "            infoGain.append((findInformationGain(data, label, column, ent), column))\n",
    "    splitColumn = max(infoGain)[1]\n",
    "\n",
    "    # Leaf node, final result, if maximum information gain is not significant.\n",
    "    # NOTE(review): the leaf takes the FIRST row's label; if the node is\n",
    "    # impure, a majority vote might be more appropriate - confirm intent.\n",
    "    if max(infoGain)[0] < 0.01:\n",
    "        nodeMapping[node] = data[0][label]\n",
    "        node += 1\n",
    "        return\n",
    "    nodeMapping[node] = splitColumn\n",
    "    parent = node\n",
    "    node += 1\n",
    "    branchs = { i[splitColumn] for i in data }  # All out-going edges from current node\n",
    "    for branch in branchs:\n",
    "\n",
    "        # Create the sub-table of rows that follow the current branch value.\n",
    "        modData = [x for x in data if splitColumn in x and x[splitColumn] == branch]\n",
    "        for y in modData:\n",
    "            if splitColumn in y:\n",
    "                del y[splitColumn]\n",
    "\n",
    "        # Create sub-tree\n",
    "        makeDecisionTree(modData, label, parent, branch)\n",
    "\n",
    "# Build the tree over the full dataset (this mutates the rows in `data`).\n",
    "makeDecisionTree(data, label)\n",
    "\n",
    "print('nodemapping ==> ', nodeMapping, '\\n\\nedges ===>', edges)\n",
    "\n",
    "\n",
    "path = []\n",
    "label_x = None\n",
    "\n",
    "# QUERY\n",
    "def query(i, data_x):\n",
    "    \"\"\"Descend the tree from node `i` following data_x's feature values.\n",
    "\n",
    "    Visited node ids are appended to the global `path`; when a leaf is\n",
    "    reached, its class is stored in the global `label_x`.\n",
    "    \"\"\"\n",
    "    global path, label_x\n",
    "    path.append(i)\n",
    "    outgoing = [e for e in edges if e[0] == i]\n",
    "    if not outgoing:\n",
    "        # No out-going edges: `i` is a leaf holding the predicted class.\n",
    "        label_x = nodeMapping[i]\n",
    "        return\n",
    "    feature_value = data_x[str(nodeMapping[i])]\n",
    "    for _, child, branch_value in outgoing:\n",
    "        if branch_value == feature_value:\n",
    "            query(child, data_x)\n",
    "            break\n",
    "        \n",
    "# Classify the last iris sample and show the decision path taken.\n",
    "data_x = get_iris()[149]\n",
    "query(0, data_x)\n",
    "print()\n",
    "print('original_data:', data_x)\n",
    "print('original_path:',path,' predict_label:', label_x)\n",
    "\n",
    "# ATTACK: search for a small deviation from the original decision path\n",
    "# that ends in a different predicted label.\n",
    "attack_label = None\n",
    "attack_path = None\n",
    "\n",
    "def judge_e(i):\n",
    "    \"\"\"Return True if node `i` has at least one out-going edge (internal node).\"\"\"\n",
    "    return any(edge[0] == i for edge in edges)\n",
    "    \n",
    "def atk_path(path_,i):\n",
    "    \"\"\"Depth-first search below node `i`, extending the prefix `path_`.\n",
    "\n",
    "    Records the first leaf whose class differs from the original\n",
    "    prediction `label_x` in the globals `attack_path` / `attack_label`;\n",
    "    later candidates are ignored.\n",
    "    \"\"\"\n",
    "    global attack_label, attack_path\n",
    "    for e in edges:\n",
    "        if e[0] != i:\n",
    "            continue\n",
    "        # Path elements are node ids (ints), so list concatenation is a\n",
    "        # sufficient copy; the original deep-copied on EVERY edge, even\n",
    "        # non-matching ones, which was wasted work.\n",
    "        ppath = path_ + [e[1]]\n",
    "        if judge_e(e[1]):\n",
    "            atk_path(ppath, e[1])\n",
    "        elif nodeMapping[e[1]] != label_x and attack_label is None:\n",
    "            attack_path = ppath\n",
    "            attack_label = nodeMapping[e[1]]\n",
    "            \n",
    "def attack():\n",
    "    \"\"\"Deviate from the original decision path, deepest node first,\n",
    "    stopping as soon as an alternative label has been found.\"\"\"\n",
    "    for depth in range(1, len(path)):\n",
    "        atk_path(path[:-depth], path[-1-depth])\n",
    "        if attack_label is not None:\n",
    "            break\n",
    "            \n",
    "# Run the attack and report the adversarial path / label found.\n",
    "attack()\n",
    "print('attack_path:',attack_path,' attack_label:', attack_label)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
