{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2020-02-26T15:18:22.742443Z",
     "start_time": "2020-02-26T15:18:22.733929Z"
    }
   },
   "outputs": [],
   "source": [
    "import pickle\n",
    "import math\n",
    "import time\n",
    "import argparse\n",
    "import random\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import networkx as nx\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torch.nn.parameter import Parameter\n",
    "from torch.nn.functional import gumbel_softmax\n",
    "from utils import gumbel_softmax_3d\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2020-02-26T15:18:23.451543Z",
     "start_time": "2020-02-26T15:18:23.448499Z"
    }
   },
   "outputs": [],
   "source": [
     "def load_data(path):\n",
     "    \"\"\"Load a pickled dict-of-lists adjacency structure and build a graph.\n",
     "\n",
     "    Args:\n",
     "        path: filesystem path to the pickle file\n",
     "            (e.g. './data/ind.citeseer.graph' -- see main()).\n",
     "\n",
     "    Returns:\n",
     "        An undirected networkx.Graph built via nx.from_dict_of_lists.\n",
     "\n",
     "    NOTE(review): pickle.load executes arbitrary code when given an\n",
     "    untrusted file; only call this on trusted data files.\n",
     "    \"\"\"\n",
     "    with open(path, 'rb') as f:\n",
     "        data = pickle.load(f)\n",
     "    G = nx.from_dict_of_lists(data)\n",
     "    return G"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2020-02-26T15:18:25.714635Z",
     "start_time": "2020-02-26T15:18:25.702785Z"
    }
   },
   "outputs": [],
   "source": [
    "def train(args, G):\n",
    "    bs = args.batch_size\n",
    "    n = G.number_of_nodes()\n",
    "    A = nx.to_numpy_matrix(G)\n",
    "    A = torch.from_numpy(A)\n",
    "    A = A.type(torch.float32)\n",
    "    best_log = []\n",
    "    A = A.cuda()\n",
    "\n",
    "    # set parameters\n",
    "    if torch.cuda.is_available():\n",
    "        A = A.cuda()\n",
    "        x = torch.rand(bs, n, 1, device='cuda')\n",
    "        x.requires_grad = True\n",
    "    else:\n",
    "        x = torch.randn(bs, n, 1, requires_grad=True)\n",
    "\n",
    "    # set optimizer\n",
    "    optimizer = torch.optim.Adam([x], lr=args.lr)\n",
    "\n",
    "    # training\n",
    "    cost_arr = []\n",
    "    for _ in range(args.iterations):\n",
    "        optimizer.zero_grad()\n",
    "        if torch.cuda.is_available():\n",
    "            probs = torch.empty(bs, n, 2, device='cuda')\n",
    "        else:\n",
    "            probs = torch.empty(bs, n, 2)\n",
    "        #p = torch.sigmoid(x)\n",
    "        p = x\n",
    "        probs[:, :, 0] = p.squeeze()\n",
    "        probs[:, :, -1] = 1-probs[:, :, 0]\n",
    "        logits = torch.log(probs+1e-10)\n",
    "        s = gumbel_softmax_3d(probs, tau=args.tau, hard=args.hard)[:, :, 0]\n",
    "        s = torch.unsqueeze(s, -1)  # size [bs, n, 1]\n",
    "        cost = torch.sum(s)\n",
    "        constraint = torch.sum((1-torch.transpose(s, 1, 2)) @ A @ (1-s))\n",
    "        loss = cost + args.eta * constraint\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        with torch.no_grad():\n",
    "\n",
    "            constraint = torch.squeeze((1-torch.transpose(s, 1, 2)) @ A @ (1-s))\n",
    "            constraint = constraint.cpu().numpy()\n",
    "            idx = np.argwhere(constraint == 0)  # select constraint=0\n",
    "            if len(idx) != 0:\n",
    "\n",
    "                cost = torch.sum(s, dim=1)[idx.reshape(-1,)]\n",
    "                # from size [bs, 1] select constrain=0\n",
    "                cost_arr.append(torch.min(cost.cpu()))\n",
    "                #print('#',_,':',s_[torch.argmin(cost),:,0],'#',torch.min(cost))\n",
    "                \n",
    "            if _ % 100 == 0:\n",
    "                print(_)\n",
    "                if len(cost_arr) != 0:\n",
    "                    print('# {}, cost: {}'.format(_, ((np.sort(cost_arr))[0:8])))\n",
    "                    #print(x.data[torch.argmin(loss),:,0])\n",
    "                else:\n",
    "                    print('Failed!')\n",
    "                    #print(x.data[torch.argmin(loss),:,0])\n",
    "\n",
    "    return cost_arr"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2020-02-26T15:22:06.509996Z",
     "start_time": "2020-02-26T15:21:12.908972Z"
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "cuda\n",
      "0\n",
      "Failed!\n",
      "100\n",
      "Failed!\n",
      "200\n",
      "Failed!\n",
      "300\n",
      "Failed!\n",
      "400\n",
      "Failed!\n",
      "500\n",
      "Failed!\n",
      "600\n",
      "Failed!\n",
      "700\n",
      "Failed!\n",
      "800\n",
      "Failed!\n",
      "900\n",
      "Failed!\n",
      "1000\n",
      "Failed!\n",
      "1100\n",
      "Failed!\n",
      "1200\n",
      "Failed!\n",
      "1300\n",
      "Failed!\n",
      "1400\n",
      "# 1400, cost: [1651.]\n",
      "1500\n",
      "# 1500, cost: [1651.]\n",
      "1600\n",
      "# 1600, cost: [1651.]\n",
      "1700\n",
      "# 1700, cost: [1606. 1651.]\n",
      "1800\n",
      "# 1800, cost: [1606. 1651.]\n",
      "1900\n",
      "# 1900, cost: [1606. 1651.]\n",
      "2000\n",
      "# 2000, cost: [1606. 1651.]\n",
      "2100\n",
      "# 2100, cost: [1582. 1606. 1651.]\n",
      "2200\n",
      "# 2200, cost: [1582. 1589. 1606. 1651.]\n",
      "2300\n",
      "# 2300, cost: [1582. 1589. 1606. 1651.]\n",
      "2400\n",
      "# 2400, cost: [1580. 1580. 1582. 1589. 1591. 1593. 1606. 1651.]\n",
      "2500\n",
      "# 2500, cost: [1575. 1580. 1580. 1581. 1582. 1589. 1591. 1593.]\n",
      "2600\n",
      "# 2600, cost: [1575. 1580. 1580. 1581. 1582. 1589. 1591. 1593.]\n",
      "2700\n",
      "# 2700, cost: [1570. 1573. 1575. 1579. 1580. 1580. 1581. 1582.]\n",
      "2800\n",
      "# 2800, cost: [1570. 1573. 1575. 1579. 1580. 1580. 1581. 1582.]\n",
      "2900\n",
      "# 2900, cost: [1566. 1569. 1570. 1573. 1575. 1578. 1579. 1580.]\n",
      "3000\n",
      "# 3000, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1575. 1576.]\n",
      "3100\n",
      "# 3100, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1575. 1576.]\n",
      "3200\n",
      "# 3200, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1575.]\n",
      "3300\n",
      "# 3300, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1575.]\n",
      "3400\n",
      "# 3400, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1575.]\n",
      "3500\n",
      "# 3500, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1575.]\n",
      "3600\n",
      "# 3600, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1575.]\n",
      "3700\n",
      "# 3700, cost: [1565. 1566. 1569. 1570. 1573. 1574. 1574. 1574.]\n",
      "3800\n",
      "# 3800, cost: [1565. 1566. 1566. 1569. 1570. 1573. 1573. 1574.]\n",
      "3900\n",
      "# 3900, cost: [1558. 1565. 1566. 1566. 1569. 1570. 1573. 1573.]\n",
      "4000\n",
      "# 4000, cost: [1558. 1565. 1566. 1566. 1569. 1569. 1570. 1571.]\n",
      "4100\n",
      "# 4100, cost: [1558. 1565. 1566. 1566. 1569. 1569. 1570. 1571.]\n",
      "4200\n",
      "# 4200, cost: [1558. 1565. 1566. 1566. 1569. 1569. 1570. 1570.]\n",
      "4300\n",
      "# 4300, cost: [1558. 1565. 1565. 1566. 1566. 1569. 1569. 1570.]\n",
      "4400\n",
      "# 4400, cost: [1558. 1565. 1565. 1566. 1566. 1569. 1569. 1570.]\n",
      "4500\n",
      "# 4500, cost: [1558. 1565. 1565. 1566. 1566. 1569. 1569. 1570.]\n",
      "4600\n",
      "# 4600, cost: [1558. 1563. 1565. 1565. 1566. 1566. 1569. 1569.]\n",
      "4700\n",
      "# 4700, cost: [1558. 1563. 1565. 1565. 1566. 1566. 1569. 1569.]\n",
      "4800\n",
      "# 4800, cost: [1558. 1563. 1565. 1565. 1566. 1566. 1569. 1569.]\n",
      "4900\n",
      "# 4900, cost: [1558. 1563. 1565. 1565. 1566. 1566. 1569. 1569.]\n",
      "5000\n",
      "# 5000, cost: [1558. 1563. 1565. 1565. 1566. 1566. 1569. 1569.]\n",
      "5100\n",
      "# 5100, cost: [1558. 1559. 1563. 1565. 1565. 1566. 1566. 1569.]\n",
      "5200\n",
      "# 5200, cost: [1558. 1559. 1563. 1565. 1565. 1566. 1566. 1569.]\n",
      "5300\n",
      "# 5300, cost: [1558. 1559. 1563. 1565. 1565. 1566. 1566. 1566.]\n",
      "5400\n",
      "# 5400, cost: [1558. 1559. 1563. 1565. 1565. 1566. 1566. 1566.]\n",
      "5500\n",
      "# 5500, cost: [1558. 1559. 1563. 1564. 1565. 1565. 1566. 1566.]\n",
      "5600\n",
      "# 5600, cost: [1558. 1559. 1563. 1564. 1565. 1565. 1566. 1566.]\n",
      "5700\n",
      "# 5700, cost: [1558. 1559. 1563. 1564. 1565. 1565. 1566. 1566.]\n",
      "5800\n",
      "# 5800, cost: [1558. 1559. 1562. 1563. 1564. 1565. 1565. 1565.]\n",
      "5900\n",
      "# 5900, cost: [1558. 1559. 1562. 1563. 1564. 1565. 1565. 1565.]\n",
      "6000\n",
      "# 6000, cost: [1558. 1559. 1562. 1562. 1563. 1564. 1565. 1565.]\n",
      "6100\n",
      "# 6100, cost: [1558. 1559. 1562. 1562. 1563. 1564. 1565. 1565.]\n",
      "6200\n",
      "# 6200, cost: [1558. 1559. 1562. 1562. 1563. 1564. 1565. 1565.]\n",
      "6300\n",
      "# 6300, cost: [1558. 1559. 1562. 1562. 1563. 1564. 1565. 1565.]\n",
      "6400\n",
      "# 6400, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "6500\n",
      "# 6500, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "6600\n",
      "# 6600, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "6700\n",
      "# 6700, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "6800\n",
      "# 6800, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "6900\n",
      "# 6900, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "7000\n",
      "# 7000, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "7100\n",
      "# 7100, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "7200\n",
      "# 7200, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1565.]\n",
      "7300\n",
      "# 7300, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1564.]\n",
      "7400\n",
      "# 7400, cost: [1558. 1559. 1562. 1562. 1563. 1563. 1564. 1564.]\n",
      "7500\n",
      "# 7500, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "7600\n",
      "# 7600, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "7700\n",
      "# 7700, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "7800\n",
      "# 7800, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "7900\n",
      "# 7900, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "8000\n",
      "# 8000, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "8100\n",
      "# 8100, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "8200\n",
      "# 8200, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "8300\n",
      "# 8300, cost: [1558. 1558. 1559. 1562. 1562. 1563. 1563. 1564.]\n",
      "8400\n",
      "# 8400, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "8500\n",
      "# 8500, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "8600\n",
      "# 8600, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "8700\n",
      "# 8700, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "8800\n",
      "# 8800, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "8900\n",
      "# 8900, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9000\n",
      "# 9000, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9100\n",
      "# 9100, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9200\n",
      "# 9200, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9300\n",
      "# 9300, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9400\n",
      "# 9400, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9500\n",
      "# 9500, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9600\n",
      "# 9600, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9700\n",
      "# 9700, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9800\n",
      "# 9800, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "9900\n",
      "# 9900, cost: [1558. 1558. 1558. 1558. 1559. 1562. 1562. 1563.]\n",
      "# 0, cost: 1558.0\n",
      "0\n",
      "Failed!\n",
      "100\n",
      "Failed!\n",
      "200\n",
      "Failed!\n",
      "300\n",
      "Failed!\n",
      "400\n",
      "Failed!\n",
      "500\n",
      "Failed!\n",
      "600\n",
      "Failed!\n",
      "700\n",
      "Failed!\n",
      "800\n",
      "Failed!\n",
      "900\n",
      "Failed!\n",
      "1000\n",
      "Failed!\n",
      "1100\n",
      "Failed!\n",
      "1200\n",
      "Failed!\n",
      "1300\n",
      "Failed!\n",
      "1400\n",
      "Failed!\n",
      "1500\n",
      "Failed!\n",
      "1600\n",
      "Failed!\n",
      "1700\n",
      "Failed!\n",
      "1800\n",
      "# 1800, cost: [1621.]\n",
      "1900\n",
      "# 1900, cost: [1621.]\n",
      "2000\n",
      "# 2000, cost: [1621.]\n",
      "2100\n",
      "# 2100, cost: [1591. 1621.]\n",
      "2200\n",
      "# 2200, cost: [1591. 1599. 1621.]\n",
      "2300\n",
      "# 2300, cost: [1580. 1582. 1591. 1599. 1621.]\n",
      "2400\n",
      "# 2400, cost: [1580. 1581. 1582. 1591. 1599. 1621.]\n",
      "2500\n",
      "# 2500, cost: [1580. 1581. 1582. 1582. 1591. 1599. 1621.]\n",
      "2600\n",
      "# 2600, cost: [1580. 1581. 1581. 1582. 1582. 1591. 1599. 1621.]\n",
      "2700\n",
      "# 2700, cost: [1573. 1573. 1579. 1580. 1581. 1581. 1582. 1582.]\n",
      "2800\n",
      "# 2800, cost: [1573. 1573. 1579. 1580. 1581. 1581. 1581. 1582.]\n",
      "2900\n",
      "# 2900, cost: [1568. 1573. 1573. 1577. 1579. 1580. 1581. 1581.]\n",
      "3000\n",
      "# 3000, cost: [1568. 1573. 1573. 1577. 1579. 1580. 1581. 1581.]\n",
      "3100\n",
      "# 3100, cost: [1567. 1568. 1571. 1573. 1573. 1575. 1577. 1578.]\n",
      "3200\n",
      "# 3200, cost: [1567. 1568. 1571. 1573. 1573. 1575. 1577. 1578.]\n",
      "3300\n",
      "# 3300, cost: [1567. 1568. 1571. 1573. 1573. 1574. 1575. 1577.]\n",
      "3400\n",
      "# 3400, cost: [1567. 1568. 1571. 1573. 1573. 1574. 1575. 1577.]\n",
      "3500\n",
      "# 3500, cost: [1567. 1568. 1571. 1573. 1573. 1573. 1574. 1575.]\n",
      "3600\n",
      "# 3600, cost: [1567. 1568. 1571. 1573. 1573. 1573. 1574. 1575.]\n",
      "3700\n",
      "# 3700, cost: [1567. 1568. 1571. 1573. 1573. 1573. 1574. 1575.]\n",
      "3800\n",
      "# 3800, cost: [1567. 1568. 1571. 1573. 1573. 1573. 1574. 1575.]\n",
      "3900\n",
      "# 3900, cost: [1567. 1568. 1568. 1571. 1573. 1573. 1573. 1574.]\n",
      "4000\n",
      "# 4000, cost: [1567. 1568. 1568. 1570. 1571. 1573. 1573. 1573.]\n",
      "4100\n",
      "# 4100, cost: [1567. 1568. 1568. 1570. 1571. 1573. 1573. 1573.]\n",
      "4200\n",
      "# 4200, cost: [1567. 1568. 1568. 1570. 1571. 1573. 1573. 1573.]\n",
      "4300\n",
      "# 4300, cost: [1567. 1568. 1568. 1568. 1570. 1571. 1573. 1573.]\n",
      "4400\n",
      "# 4400, cost: [1567. 1568. 1568. 1568. 1570. 1571. 1573. 1573.]\n",
      "4500\n",
      "# 4500, cost: [1567. 1568. 1568. 1568. 1570. 1570. 1571. 1573.]\n",
      "4600\n",
      "# 4600, cost: [1567. 1568. 1568. 1568. 1570. 1570. 1571. 1573.]\n",
      "4700\n",
      "# 4700, cost: [1567. 1568. 1568. 1568. 1570. 1570. 1570. 1571.]\n",
      "4800\n",
      "# 4800, cost: [1567. 1568. 1568. 1568. 1570. 1570. 1570. 1570.]\n",
      "4900\n",
      "# 4900, cost: [1567. 1568. 1568. 1568. 1570. 1570. 1570. 1570.]\n",
      "5000\n",
      "# 5000, cost: [1567. 1568. 1568. 1568. 1568. 1570. 1570. 1570.]\n",
      "5100\n",
      "# 5100, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5200\n",
      "# 5200, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5300\n",
      "# 5300, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "5400\n",
      "# 5400, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5500\n",
      "# 5500, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5600\n",
      "# 5600, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5700\n",
      "# 5700, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5800\n",
      "# 5800, cost: [1561. 1567. 1568. 1568. 1568. 1568. 1570. 1570.]\n",
      "5900\n",
      "# 5900, cost: [1558. 1561. 1567. 1568. 1568. 1568. 1568. 1568.]\n",
      "6000\n",
      "# 6000, cost: [1558. 1561. 1563. 1567. 1568. 1568. 1568. 1568.]\n",
      "6100\n",
      "# 6100, cost: [1558. 1561. 1563. 1564. 1567. 1568. 1568. 1568.]\n",
      "6200\n",
      "# 6200, cost: [1558. 1561. 1563. 1564. 1567. 1568. 1568. 1568.]\n",
      "6300\n",
      "# 6300, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6400\n",
      "# 6400, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6500\n",
      "# 6500, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6600\n",
      "# 6600, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6700\n",
      "# 6700, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6800\n",
      "# 6800, cost: [1558. 1561. 1561. 1563. 1564. 1567. 1568. 1568.]\n",
      "6900\n",
      "# 6900, cost: [1558. 1561. 1561. 1563. 1564. 1566. 1567. 1568.]\n",
      "7000\n",
      "# 7000, cost: [1558. 1561. 1561. 1563. 1564. 1566. 1567. 1568.]\n",
      "7100\n",
      "# 7100, cost: [1558. 1561. 1561. 1563. 1564. 1566. 1567. 1568.]\n",
      "7200\n",
      "# 7200, cost: [1558. 1561. 1561. 1563. 1564. 1566. 1567. 1567.]\n",
      "7300\n",
      "# 7300, cost: [1558. 1561. 1561. 1563. 1564. 1565. 1566. 1567.]\n",
      "7400\n",
      "# 7400, cost: [1558. 1561. 1561. 1563. 1564. 1565. 1566. 1567.]\n",
      "7500\n",
      "# 7500, cost: [1558. 1561. 1561. 1563. 1564. 1564. 1565. 1566.]\n",
      "7600\n",
      "# 7600, cost: [1558. 1561. 1561. 1563. 1564. 1564. 1565. 1566.]\n",
      "7700\n",
      "# 7700, cost: [1558. 1561. 1561. 1563. 1564. 1564. 1565. 1566.]\n",
      "7800\n",
      "# 7800, cost: [1558. 1561. 1561. 1563. 1564. 1564. 1565. 1566.]\n",
      "7900\n",
      "# 7900, cost: [1558. 1561. 1561. 1563. 1564. 1564. 1565. 1566.]\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-15-0835b3f2cf5f>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     39\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     40\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0m__name__\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'__main__'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 41\u001b[0;31m     \u001b[0mmain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m<ipython-input-15-0835b3f2cf5f>\u001b[0m in \u001b[0;36mmain\u001b[0;34m()\u001b[0m\n\u001b[1;32m     31\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     32\u001b[0m     \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mensemble\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 33\u001b[0;31m         \u001b[0mcost\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mG\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     34\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcost\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m!=\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     35\u001b[0m             \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'# {}, cost: {}'\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mmin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mcost\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-12-09f8e224b123>\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(args, G)\u001b[0m\n\u001b[1;32m     37\u001b[0m         \u001b[0mconstraint\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtranspose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m@\u001b[0m \u001b[0mA\u001b[0m \u001b[0;34m@\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0ms\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     38\u001b[0m         \u001b[0mloss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcost\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0meta\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mconstraint\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 39\u001b[0;31m         \u001b[0mloss\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     40\u001b[0m         \u001b[0moptimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     41\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/data/liyaoxin/anaconda3/lib/python3.6/site-packages/torch/tensor.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(self, gradient, retain_graph, create_graph)\u001b[0m\n\u001b[1;32m    193\u001b[0m                 \u001b[0mproducts\u001b[0m\u001b[0;34m.\u001b[0m \u001b[0mDefaults\u001b[0m \u001b[0mto\u001b[0m\u001b[0;31m \u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;31m`\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    194\u001b[0m         \"\"\"\n\u001b[0;32m--> 195\u001b[0;31m         \u001b[0mtorch\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mautograd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mbackward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgradient\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mretain_graph\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcreate_graph\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    196\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    197\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mregister_hook\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhook\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/data/liyaoxin/anaconda3/lib/python3.6/site-packages/torch/autograd/__init__.py\u001b[0m in \u001b[0;36mbackward\u001b[0;34m(tensors, grad_tensors, retain_graph, create_graph, grad_variables)\u001b[0m\n\u001b[1;32m     97\u001b[0m     Variable._execution_engine.run_backward(\n\u001b[1;32m     98\u001b[0m         \u001b[0mtensors\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgrad_tensors\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mretain_graph\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcreate_graph\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 99\u001b[0;31m         allow_unreachable=True)  # allow_unreachable flag\n\u001b[0m\u001b[1;32m    100\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    101\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "def main():\n",
    "    # Training settings\n",
    "    parser = argparse.ArgumentParser(\n",
    "        description='Solving MVC problems (with fixed tau in GS, parallel version)')\n",
    "    parser.add_argument('--batch-size', type=int, default=128,\n",
    "                        help='batch size (default: 128)')\n",
    "    parser.add_argument('--data', type=str, default='citeseer',\n",
    "                        help='data name (default: cora)')\n",
    "    parser.add_argument('--tau', type=float, default=1.,\n",
    "                        help='tau value in Gumbel-softmax (default: 1)')\n",
    "    parser.add_argument('--hard', type=bool, default=True,\n",
    "                        help='hard sampling in Gumbel-softmax (default: True)')\n",
    "    parser.add_argument('--lr', type=float, default=1e-2,\n",
    "                        help='learning rate (default: 1e-2)')\n",
    "    parser.add_argument('--eta', type=float, default=2.5,\n",
    "                        help='constraint (default: 5)')\n",
    "    parser.add_argument('--ensemble', type=int, default=10,\n",
    "                        help='# experiments (default: 100)')\n",
    "    parser.add_argument('--iterations', type=int, default=10000,\n",
    "                        help='# iterations in gradient descent (default: 20000)')\n",
    "    parser.add_argument('--seed', type=int, default=1, help='random seed (default: 1)')\n",
    "    args = parser.parse_args(args=[])\n",
    "\n",
    "    # torch.manual_seed(args.seed)\n",
    "    use_cuda = torch.cuda.is_available()\n",
    "    device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n",
    "    print(device)\n",
    "\n",
    "    # loading data\n",
    "    G = load_data('./data/ind.' + args.data + '.graph')\n",
    "\n",
    "    for i in range(args.ensemble):\n",
    "        cost = train(args, G)\n",
    "        if len(cost) != 0:\n",
    "            print('# {}, cost: {}'.format(i, min(cost)))\n",
    "        else:\n",
    "            print('Failed!')\n",
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    main()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.10"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
