{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:10.869380Z",
     "start_time": "2021-06-21T06:11:58.338833Z"
    }
   },
   "outputs": [],
   "source": [
     "import numpy as np\n",
     "import torch\n",
     "import torch.nn.functional as F\n",
     "from torch import Tensor\n",
     "from torch.nn import Parameter\n",
     "from torch_scatter import scatter_add\n",
     "from torch_geometric.nn.conv import MessagePassing\n",
     "from torch_geometric.utils import add_remaining_self_loops, add_self_loops, degree\n",
     "from torch_geometric.utils.num_nodes import maybe_num_nodes\n",
     "from torch_geometric.data import Data\n",
     "from torch_geometric.nn.inits import glorot, zeros"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:15.168671Z",
     "start_time": "2021-06-21T06:12:15.083693Z"
    }
   },
   "outputs": [],
   "source": [
    "# 固定种子\n",
    "seed = 1234\n",
    "torch.manual_seed(seed)\n",
    "torch.cuda.manual_seed(seed)\n",
    "torch.cuda.manual_seed_all(seed)  \n",
    "np.random.seed(seed)  # Numpy module.\n",
    "# random.seed(seed)  # Python random module.\n",
    "torch.manual_seed(seed)\n",
    "torch.backends.cudnn.benchmark = False\n",
    "torch.backends.cudnn.deterministic = True"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "# Cora dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:21:45.286488Z",
     "start_time": "2021-06-21T06:21:44.194052Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Data(edge_index=[2, 13566], label_feature=[2708, 7], x=[2708, 1433], y=[2708])"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "path = \"data/cora/\"\n",
    "cites = path + \"cora.cites\"\n",
    "content = path + \"cora.content\"\n",
    "\n",
    "\n",
    "# 索引字典，转换到从0开始编码\n",
    "index_dict = dict()\n",
    "# 标签字典\n",
    "label_to_index = dict()\n",
    "\n",
    "features = []\n",
    "labels = []\n",
    "edge_index = []\n",
    "one_hot_labels = []\n",
    "\n",
    "num_label = 7\n",
    "\n",
    "with open(content,\"r\") as f:\n",
    "    nodes = f.readlines()\n",
    "    for node in nodes:\n",
    "        node_info = node.split()\n",
    "        index_dict[int(node_info[0])] = len(index_dict)\n",
    "        features.append([int(i) for i in node_info[1:-1]])\n",
    "        \n",
    "        label_str = node_info[-1]\n",
    "        if(label_str not in label_to_index.keys()):\n",
    "            label_to_index[label_str] = len(label_to_index)\n",
    "        one_hot_label = [0 for i in range(num_label)]\n",
    "        one_hot_label[label_to_index[label_str]] = 1\n",
    "        labels.append(label_to_index[label_str])\n",
    "        one_hot_labels.append(one_hot_label)\n",
    "        \n",
    "\n",
    "with open(cites,\"r\") as f:\n",
    "    edges = f.readlines()\n",
    "    for edge in edges:\n",
    "        start, end = edge.split()\n",
    "        edge_index.append([index_dict[int(start)],index_dict[int(end)]])\n",
    "        edge_index.append([index_dict[int(end)],index_dict[int(start)]])\n",
    "    \n",
    "labels = torch.LongTensor(labels)\n",
    "one_hot_labels = torch.FloatTensor(one_hot_labels)\n",
    "features = torch.FloatTensor(features)\n",
    "# 行归一化\n",
    "features = torch.nn.functional.normalize(features, p=1, dim=1)\n",
    "\n",
    "edge_index =  torch.LongTensor(edge_index).t()\n",
    "\n",
    "# 增加自环\n",
    "num_nodes = len(index_dict)\n",
    "edge_index, _ = add_remaining_self_loops(edge_index, num_nodes = num_nodes)\n",
    "\n",
    "# 添加自环, GCN会默认添加自环, 这里不需要额外处理\n",
    "# edge_index, _ = add_self_loops(edge_index,num_nodes=len(index_dict))\n",
    "\n",
    "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
    "data = Data(x = features, label_feature = one_hot_labels, edge_index = edge_index.contiguous(), y = labels).to(device)\n",
    "data"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "# CiteSeer dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:26:38.698208Z",
     "start_time": "2021-06-21T06:26:35.360579Z"
    }
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Data(edge_index=[2, 12494], label_feature=[3312, 6], x=[3312, 3703], y=[3312])"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "path = \"data/citeseer/\"\n",
    "cites = path + \"citeseer.cites\"\n",
    "content = path + \"citeseer.content\"\n",
    "\n",
    "\n",
    "# 索引字典，转换到从0开始编码\n",
    "index_dict = dict()\n",
    "# 标签字典\n",
    "label_to_index = dict()\n",
    "\n",
    "features = []\n",
    "labels = []\n",
    "edge_index = []\n",
    "one_hot_labels = []\n",
    "\n",
    "num_label = 6\n",
    "\n",
    "with open(content,\"r\") as f:\n",
    "    nodes = f.readlines()\n",
    "    for node in nodes:\n",
    "        node_info = node.split()\n",
    "        index_dict[node_info[0]] = len(index_dict)\n",
    "        features.append([int(i) for i in node_info[1:-1]])\n",
    "        \n",
    "        label_str = node_info[-1]\n",
    "        if(label_str not in label_to_index.keys()):\n",
    "            label_to_index[label_str] = len(label_to_index)\n",
    "        one_hot_label = [0 for i in range(num_label)]\n",
    "        one_hot_label[label_to_index[label_str]] = 1\n",
    "        labels.append(label_to_index[label_str])\n",
    "        one_hot_labels.append(one_hot_label)\n",
    "        \n",
    "\n",
    "with open(cites,\"r\") as f:\n",
    "    edges = f.readlines()\n",
    "    for edge in edges:\n",
    "        try:\n",
    "            start, end = edge.split()\n",
    "            edge_index.append([index_dict[start],index_dict[end]])\n",
    "            edge_index.append([index_dict[end],index_dict[start]])\n",
    "        except:\n",
    "            pass\n",
    "    \n",
    "labels = torch.LongTensor(labels)\n",
    "one_hot_labels = torch.FloatTensor(one_hot_labels)\n",
    "features = torch.FloatTensor(features)\n",
    "# 行归一化\n",
    "# features = torch.nn.functional.normalize(features, p=1, dim=1)\n",
    "\n",
    "edge_index =  torch.LongTensor(edge_index).t()\n",
    "\n",
    "num_nodes = len(index_dict)\n",
    "edge_index, _ = add_remaining_self_loops(edge_index, num_nodes = num_nodes)\n",
    "\n",
    "# 添加自环, GCN会默认添加自环, 这里不需要额外处理\n",
    "# edge_index, _ = add_self_loops(edge_index,num_nodes=len(index_dict))\n",
    "\n",
    "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
    "data = Data(x = features, label_feature = one_hot_labels, edge_index = edge_index.contiguous(), y = labels).to(device)\n",
    "data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:38.055124Z",
     "start_time": "2021-06-21T06:12:38.043916Z"
    }
   },
   "outputs": [],
   "source": [
    "def gcn_norm(edge_index, edge_weight=None, num_nodes=None, improved=False,\n",
    "             add_self_loops=True, dtype=None):\n",
    "\n",
    "    fill_value = 2. if improved else 1.\n",
    "    \n",
    "    # 获取节点数\n",
    "    num_nodes = maybe_num_nodes(edge_index, num_nodes)\n",
    "\n",
    "\n",
    "#     # 添加自环，如果imporved为真，自环权重为2，否则为1\n",
    "#     if add_self_loops:\n",
    "#         edge_index, tmp_edge_weight = add_remaining_self_loops(\n",
    "#             edge_index, edge_weight, fill_value, num_nodes)\n",
    "#         assert tmp_edge_weight is not None\n",
    "#         edge_weight = tmp_edge_weight\n",
    "\n",
    "    edge_weight_ones = torch.ones((edge_index.size(1), ), dtype=dtype,\n",
    "                 device=edge_index.device)\n",
    "        \n",
    "    # 归一化\n",
    "    row, col = edge_index[0], edge_index[1]\n",
    "#     deg = degree(col)\n",
    "    deg = scatter_add(edge_weight, col, dim=0, dim_size=num_nodes)\n",
    "#     deg_inv_sqrt = deg.pow_(-0.5)\n",
    "#     deg_inv_sqrt.masked_fill_(deg_inv_sqrt == float('inf'), 0)\n",
    "#     return edge_index, deg_inv_sqrt[row] * edge_weight * deg_inv_sqrt[col]\n",
    "\n",
    "    deg_inv_sqrt = deg.pow_(-1)\n",
    "    deg_inv_sqrt.masked_fill_(deg_inv_sqrt == float('inf'), 0)\n",
    "    return edge_index, edge_weight * deg_inv_sqrt[col]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:38.384537Z",
     "start_time": "2021-06-21T06:12:38.371408Z"
    }
   },
   "outputs": [],
   "source": [
    "def lpa_norm(edge_index, edge_weight=None, num_nodes=None, improved=False,\n",
    "             add_self_loops=False, dtype=None):\n",
    "\n",
    "    fill_value = 2. if improved else 1.\n",
    "    \n",
    "    num_nodes = maybe_num_nodes(edge_index, num_nodes)\n",
    "\n",
    "    # 如果边是无权的，全设为1\n",
    "    if edge_weight is None:\n",
    "        edge_weight = torch.ones((edge_index.size(1), ), dtype=dtype,\n",
    "                                 device=edge_index.device)\n",
    "    \n",
    "    # 添加自环\n",
    "    if add_self_loops:\n",
    "        edge_index, tmp_edge_weight = add_remaining_self_loops(\n",
    "            edge_index, edge_weight, fill_value, num_nodes)\n",
    "        assert tmp_edge_weight is not None\n",
    "        edge_weight = tmp_edge_weight\n",
    "        \n",
    "    edge_weight_ones = torch.ones((edge_index.size(1), ), dtype=dtype,\n",
    "                     device=edge_index.device)\n",
    "\n",
    "    row, col = edge_index[0], edge_index[1]\n",
    "    deg = degree(col)\n",
    "#     deg = scatter_add(edge_weight, col, dim=0, dim_size=num_nodes)\n",
    "#     deg_inv_sqrt = deg.pow_(-1)\n",
    "#     deg_inv_sqrt.masked_fill_(deg_inv_sqrt == float('inf'), 0)\n",
    "#     return edge_index, deg_inv_sqrt[row] * edge_weight * deg_inv_sqrt[col]\n",
    "\n",
    "    deg_inv_sqrt = deg.pow_(-1)\n",
    "    deg_inv_sqrt.masked_fill_(deg_inv_sqrt == float('inf'), 0)\n",
    "    return edge_index, edge_weight * deg_inv_sqrt[col]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:38.748110Z",
     "start_time": "2021-06-21T06:12:38.739839Z"
    }
   },
   "outputs": [],
   "source": [
     "class LableProp(MessagePassing):\n",
     "    \"\"\"A single label-propagation step (name typo 'Lable' kept: public API).\n",
     "\n",
     "    Propagates node label vectors along edges with 'add' aggregation,\n",
     "    scaling each message by a per-edge weight normalized via lpa_norm.\n",
     "    \"\"\"\n",
     "\n",
     "    def __init__(self, cached = True, normalize = True):\n",
     "        super(LableProp, self).__init__(aggr='add')\n",
     "        \n",
     "        self._cached_edge_index = None\n",
     "        self._cached_adj_t = None\n",
     "        self.normalize = normalize\n",
     "        self.cached = cached\n",
     "        \n",
     "    def forward(self, x, edge_index, edge_weight):\n",
     "        # NOTE(review): the cache stores only the RAW edge_index seen on the\n",
     "        # first call; lpa_norm is still recomputed on every forward, so the\n",
     "        # cache does not actually avoid any normalization work.\n",
     "        if self.normalize:\n",
     "            cache = self._cached_edge_index\n",
     "            if cache is None:\n",
     "                if self.cached:\n",
     "                    self._cached_edge_index = edge_index\n",
     "            else:\n",
     "                edge_index = cache\n",
     "            \n",
     "            edge_index, edge_weight = lpa_norm(  # yapf: disable\n",
     "                    edge_index, edge_weight,\n",
     "                    improved = False, add_self_loops = True)\n",
     "\n",
     "        out = self.propagate(edge_index, x=x, edge_weight=edge_weight)\n",
     "        return out\n",
     "\n",
     "    \n",
     "    def message(self, x_j, edge_weight) -> Tensor:\n",
     "        # Scale each incoming label vector by its (normalized) edge weight.\n",
     "#         print(edge_weight)\n",
     "        return x_j if edge_weight is None else edge_weight.view(-1, 1) * x_j\n",
     "        "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:39.113654Z",
     "start_time": "2021-06-21T06:12:39.103934Z"
    }
   },
   "outputs": [],
   "source": [
    "class LabelNet(torch.nn.Module):\n",
    "    def __init__(self, num_edge):\n",
    "        super(LabelNet,self).__init__()\n",
    "        self.num_edge = num_edge\n",
    "        self.edge_weight = Parameter(torch.ones(self.num_edge))\n",
    "        self.LP1 = LableProp()\n",
    "        self.LP2 = LableProp()\n",
    "        self.LP3 = LableProp()\n",
    "        self.LP4 = LableProp()\n",
    "        self.LP5 = LableProp()\n",
    "        \n",
    "        \n",
    "    def forward(self, data, train_mask):\n",
    "        x = data.label_feature\n",
    "        edge_index = data.edge_index\n",
    "        temp_x = x\n",
    "        \n",
    "        x = self.LP1(x, edge_index, self.edge_weight)    \n",
    "        x[train_mask] = temp_x[train_mask]\n",
    "        x = self.LP2(x, edge_index, self.edge_weight)\n",
    "        x[train_mask] = temp_x[train_mask]\n",
    "        x = self.LP3(x, edge_index, self.edge_weight)\n",
    "        x[train_mask] = temp_x[train_mask]\n",
    "        x = self.LP4(x, edge_index, self.edge_weight)\n",
    "        x[train_mask] = temp_x[train_mask]\n",
    "        x = self.LP5(x, edge_index, self.edge_weight)\n",
    "#         x[train_mask] = temp_x[train_mask]\n",
    "#         print(x)\n",
    "        \n",
    "        return F.log_softmax(x, dim=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:39.519949Z",
     "start_time": "2021-06-21T06:12:39.504989Z"
    }
   },
   "outputs": [],
   "source": [
    "class GCNConv(MessagePassing):\n",
    "    def __init__(self, in_channels, out_channels,\n",
    "                 improved = False, cached = False,\n",
    "                 add_self_loops = True, normalize= True,\n",
    "                 bias = True, **kwargs):\n",
    "\n",
    "        kwargs.setdefault('aggr', 'add')\n",
    "        super(GCNConv, self).__init__(**kwargs)\n",
    "\n",
    "        self.in_channels = in_channels\n",
    "        self.out_channels = out_channels\n",
    "        self.improved = improved\n",
    "        self.cached = cached\n",
    "        self.add_self_loops = add_self_loops\n",
    "        self.normalize = normalize\n",
    "\n",
    "        self._cached_edge_index = None\n",
    "        self._cached_adj_t = None\n",
    "\n",
    "        self.weight = Parameter(torch.Tensor(in_channels, out_channels))\n",
    "\n",
    "        if bias:\n",
    "            self.bias = Parameter(torch.Tensor(out_channels))\n",
    "\n",
    "        self.reset_parameters()\n",
    "\n",
    "    # 初始化参数\n",
    "    def reset_parameters(self):\n",
    "        glorot(self.weight)\n",
    "        zeros(self.bias)\n",
    "        self._cached_edge_index = None\n",
    "        self._cached_adj_t = None\n",
    "\n",
    "\n",
    "    def forward(self, x, edge_index,\n",
    "                edge_weight= None):\n",
    "        \n",
    "        # 如果边是无权的，全设为1\n",
    "        if edge_weight is None:\n",
    "            edge_weight = torch.ones((edge_index.size(1), ), device=edge_index.device)\n",
    "\n",
    "        edge_index, edge_weight = gcn_norm(  # yapf: disable\n",
    "            edge_index, edge_weight, x.size(self.node_dim),\n",
    "            self.improved, self.add_self_loops)\n",
    "        \n",
    "        x = x @ self.weight\n",
    "\n",
    "        # propagate_type: (x: Tensor, edge_weight: OptTensor)\n",
    "        out = self.propagate(edge_index, x=x, edge_weight=edge_weight,\n",
    "                             size=None)\n",
    "\n",
    "        if self.bias is not None:\n",
    "            out += self.bias\n",
    "\n",
    "        return out\n",
    "\n",
    "    def message(self, x_j, edge_weight) -> Tensor:\n",
    "        return x_j if edge_weight is None else edge_weight.view(-1, 1) * x_j\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:40.007217Z",
     "start_time": "2021-06-21T06:12:39.995973Z"
    }
   },
   "outputs": [],
   "source": [
    "class GCNNet(torch.nn.Module):\n",
    "    def __init__(self, num_feature, num_label):\n",
    "        super(GCNNet,self).__init__()\n",
    "        self.GCN1 = GCNConv(num_feature, 16, cached=False)\n",
    "        self.GCN2 = GCNConv(16, num_label, cached=False)  \n",
    "        self.dropout = torch.nn.Dropout(p=0.5)\n",
    "        \n",
    "    def forward(self, data, edge_weight =None):\n",
    "        x, edge_index = data.x, data.edge_index\n",
    "        \n",
    "        x = self.GCN1(x, edge_index, edge_weight)\n",
    "        x = F.relu(x)\n",
    "        x = self.dropout(x)\n",
    "        x = self.GCN2(x, edge_index, edge_weight)\n",
    "        \n",
    "        return F.log_softmax(x, dim=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:12:40.350502Z",
     "start_time": "2021-06-21T06:12:40.339546Z"
    }
   },
   "outputs": [],
   "source": [
     "class GCN_LPA(torch.nn.Module):\n",
     "    \"\"\"GCN-LPA: a GCN whose per-edge weights come from a parallel\n",
     "    label-propagation branch (LPA). Returns (gcn_log_probs, lpa_log_probs).\n",
     "    \"\"\"\n",
     "\n",
     "    def __init__(self, num_feature, num_edge, num_label):\n",
     "        super(GCN_LPA,self).__init__()\n",
     "        self.GCN = GCNNet(num_feature,num_label)\n",
     "        self.LPA = LabelNet(num_edge)\n",
     "\n",
     "    def forward(self, data, train_mask):\n",
     "        x2 = self.LPA(data, train_mask)\n",
     "        \n",
     "        # Detached copy: the GCN branch uses the LPA edge weights but does\n",
     "        # NOT backpropagate into them — only the LPA loss trains them.\n",
     "        # NOTE(review): relies on the module-level global `device`.\n",
     "        edge_weight = torch.Tensor(self.LPA.edge_weight.clone().to('cpu').detach()).to(device)\n",
     "        \n",
     "        x1 = self.GCN(data, edge_weight=edge_weight)\n",
     "        \n",
     "        # NOTE(review): both branches already return log_softmax, so the\n",
     "        # extra log_softmax here is redundant (idempotent; kept as-is).\n",
     "        return F.log_softmax(x1, dim=1), F.log_softmax(x2, dim=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:35:50.296466Z",
     "start_time": "2021-06-21T06:35:50.281099Z"
    }
   },
   "outputs": [],
   "source": [
     "# Random 60/20/20 split of node indices into train / val / test.\n",
     "mask = torch.randperm(len(index_dict))\n",
     "# Sequential split (alternative):\n",
     "# mask = torch.LongTensor([i for i in range(len(index_dict))])\n",
     "train_mask = mask[:int(features.shape[0]*0.6)]\n",
     "unlabeled_mask = mask[int(features.shape[0]*0.6):]\n",
     "val_mask = mask[int(features.shape[0]*0.6):int(features.shape[0]*0.8)]\n",
     "test_mask = mask[int(features.shape[0]*0.8):]\n",
     "\n",
     "# Zero the one-hot label features of non-train nodes so label propagation\n",
     "# cannot leak val/test labels (the zeros row is assigned to each masked row).\n",
     "data.label_feature = one_hot_labels.to(device)\n",
     "data.label_feature[unlabeled_mask] = torch.zeros(num_label).to(device)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:35:50.717265Z",
     "start_time": "2021-06-21T06:35:50.449880Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0 loss: 1.1786\n",
      "epoch: 1 loss: 1.1794\n",
      "epoch: 2 loss: 1.1801\n",
      "epoch: 3 loss: 1.1808\n",
      "epoch: 4 loss: 1.1816\n",
      "epoch: 5 loss: 1.1823\n",
      "epoch: 6 loss: 1.1830\n",
      "epoch: 7 loss: 1.1838\n",
      "epoch: 8 loss: 1.1845\n",
      "epoch: 9 loss: 1.1852\n",
      "Accuracy: 0.6667\n",
      "epoch: 10 loss: 1.1860\n",
      "epoch: 11 loss: 1.1867\n",
      "epoch: 12 loss: 1.1874\n",
      "epoch: 13 loss: 1.1882\n",
      "epoch: 14 loss: 1.1889\n",
      "epoch: 15 loss: 1.1896\n",
      "epoch: 16 loss: 1.1903\n",
      "epoch: 17 loss: 1.1911\n",
      "epoch: 18 loss: 1.1918\n",
      "epoch: 19 loss: 1.1925\n",
      "Accuracy: 0.6637\n",
      "Parameter containing:\n",
      "tensor([0.9800, 0.9800, 0.9800,  ..., 0.9800, 0.9800, 0.9800], device='cuda:0',\n",
      "       requires_grad=True)\n"
     ]
    }
   ],
   "source": [
    "model = LabelNet(data.edge_index.shape[1]).to(device)\n",
    "optimizer = torch.optim.Adam(model.parameters(), lr=1e-3, weight_decay=5e-4)\n",
    "\n",
    "for epoch in range(20):\n",
    "    model.train()\n",
    "    optimizer.zero_grad()\n",
    "    out = model(data ,train_mask)\n",
    "#     print(out)\n",
    "    loss = F.nll_loss(out[train_mask], data.y[train_mask])\n",
    "    print('epoch: %d loss: %.4f' %(epoch, loss))\n",
    "    loss.backward()\n",
    "    optimizer.step()\n",
    "    \n",
    "    if((epoch + 1)% 10 == 0):\n",
    "        model.eval()\n",
    "        _, pred = model(data ,train_mask).max(dim=1)\n",
    "        correct = int(pred[test_mask].eq(data.y[test_mask]).sum().item())\n",
    "        acc = correct / len(test_mask)\n",
    "        print('Accuracy: {:.4f}'.format(acc))\n",
    "\n",
    "print(model.edge_weight)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:33:35.741189Z",
     "start_time": "2021-06-21T06:33:25.841376Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Parameter containing:\n",
      "tensor([1., 1., 1.,  ..., 1., 1., 1.], device='cuda:0', requires_grad=True)\n",
      "epoch: 0 loss: 1.7947 1.4243 3.2190 \n",
      "epoch: 1 loss: 1.7777 1.4247 3.2024 \n",
      "epoch: 2 loss: 1.7602 1.4252 3.1854 \n",
      "epoch: 3 loss: 1.7487 1.4257 3.1743 \n",
      "epoch: 4 loss: 1.7334 1.4261 3.1595 \n",
      "epoch: 5 loss: 1.7154 1.4266 3.1419 \n",
      "epoch: 6 loss: 1.6982 1.4270 3.1252 \n",
      "epoch: 7 loss: 1.6767 1.4275 3.1042 \n",
      "epoch: 8 loss: 1.6568 1.4279 3.0847 \n",
      "epoch: 9 loss: 1.6364 1.4284 3.0648 \n",
      "val_accuracy: 0.7190\n",
      "epoch: 10 loss: 1.6180 1.4289 3.0469 \n",
      "epoch: 11 loss: 1.5992 1.4293 3.0285 \n",
      "epoch: 12 loss: 1.5752 1.4298 3.0050 \n",
      "epoch: 13 loss: 1.5478 1.4302 2.9781 \n",
      "epoch: 14 loss: 1.5246 1.4307 2.9553 \n",
      "epoch: 15 loss: 1.5069 1.4311 2.9380 \n",
      "epoch: 16 loss: 1.4855 1.4316 2.9171 \n",
      "epoch: 17 loss: 1.4602 1.4320 2.8922 \n",
      "epoch: 18 loss: 1.4511 1.4325 2.8835 \n",
      "epoch: 19 loss: 1.4263 1.4329 2.8592 \n",
      "val_accuracy: 0.7553\n",
      "epoch: 20 loss: 1.4106 1.4334 2.8440 \n",
      "epoch: 21 loss: 1.3754 1.4338 2.8093 \n",
      "epoch: 22 loss: 1.3648 1.4343 2.7991 \n",
      "epoch: 23 loss: 1.3441 1.4348 2.7788 \n",
      "epoch: 24 loss: 1.3261 1.4352 2.7613 \n",
      "epoch: 25 loss: 1.3077 1.4357 2.7433 \n",
      "epoch: 26 loss: 1.2864 1.4361 2.7225 \n",
      "epoch: 27 loss: 1.2778 1.4366 2.7144 \n",
      "epoch: 28 loss: 1.2427 1.4370 2.6797 \n",
      "epoch: 29 loss: 1.2348 1.4374 2.6722 \n",
      "val_accuracy: 0.7613\n",
      "epoch: 30 loss: 1.2137 1.4379 2.6516 \n",
      "epoch: 31 loss: 1.1980 1.4383 2.6363 \n",
      "epoch: 32 loss: 1.1764 1.4388 2.6151 \n",
      "epoch: 33 loss: 1.1632 1.4392 2.6024 \n",
      "epoch: 34 loss: 1.1487 1.4397 2.5884 \n",
      "epoch: 35 loss: 1.1433 1.4401 2.5834 \n",
      "epoch: 36 loss: 1.1137 1.4406 2.5543 \n",
      "epoch: 37 loss: 1.0996 1.4410 2.5406 \n",
      "epoch: 38 loss: 1.0929 1.4415 2.5344 \n",
      "epoch: 39 loss: 1.0651 1.4419 2.5070 \n",
      "val_accuracy: 0.7644\n",
      "epoch: 40 loss: 1.0525 1.4423 2.4949 \n",
      "epoch: 41 loss: 1.0412 1.4428 2.4840 \n",
      "epoch: 42 loss: 1.0197 1.4432 2.4629 \n",
      "epoch: 43 loss: 1.0223 1.4437 2.4659 \n",
      "epoch: 44 loss: 1.0089 1.4441 2.4530 \n",
      "epoch: 45 loss: 0.9885 1.4446 2.4331 \n",
      "epoch: 46 loss: 0.9776 1.4450 2.4226 \n",
      "epoch: 47 loss: 0.9656 1.4454 2.4110 \n",
      "epoch: 48 loss: 0.9549 1.4459 2.4008 \n",
      "epoch: 49 loss: 0.9611 1.4463 2.4074 \n",
      "val_accuracy: 0.7628\n",
      "epoch: 50 loss: 0.9410 1.4467 2.3877 \n",
      "epoch: 51 loss: 0.9184 1.4472 2.3656 \n",
      "epoch: 52 loss: 0.9050 1.4476 2.3526 \n",
      "epoch: 53 loss: 0.9021 1.4481 2.3501 \n",
      "epoch: 54 loss: 0.8971 1.4485 2.3456 \n",
      "epoch: 55 loss: 0.8764 1.4489 2.3253 \n",
      "epoch: 56 loss: 0.8701 1.4494 2.3195 \n",
      "epoch: 57 loss: 0.8642 1.4498 2.3140 \n",
      "epoch: 58 loss: 0.8504 1.4502 2.3006 \n",
      "epoch: 59 loss: 0.8566 1.4507 2.3072 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 60 loss: 0.8478 1.4511 2.2989 \n",
      "epoch: 61 loss: 0.8190 1.4515 2.2705 \n",
      "epoch: 62 loss: 0.8197 1.4520 2.2716 \n",
      "epoch: 63 loss: 0.8226 1.4524 2.2750 \n",
      "epoch: 64 loss: 0.8173 1.4528 2.2701 \n",
      "epoch: 65 loss: 0.7823 1.4533 2.2355 \n",
      "epoch: 66 loss: 0.7870 1.4537 2.2407 \n",
      "epoch: 67 loss: 0.7888 1.4541 2.2429 \n",
      "epoch: 68 loss: 0.7843 1.4545 2.2389 \n",
      "epoch: 69 loss: 0.7630 1.4550 2.2180 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 70 loss: 0.7680 1.4554 2.2233 \n",
      "epoch: 71 loss: 0.7567 1.4558 2.2125 \n",
      "epoch: 72 loss: 0.7473 1.4563 2.2035 \n",
      "epoch: 73 loss: 0.7466 1.4567 2.2032 \n",
      "epoch: 74 loss: 0.7534 1.4571 2.2105 \n",
      "epoch: 75 loss: 0.7337 1.4575 2.1912 \n",
      "epoch: 76 loss: 0.7252 1.4580 2.1831 \n",
      "epoch: 77 loss: 0.7224 1.4584 2.1808 \n",
      "epoch: 78 loss: 0.7084 1.4588 2.1672 \n",
      "epoch: 79 loss: 0.7070 1.4592 2.1663 \n",
      "val_accuracy: 0.7689\n",
      "epoch: 80 loss: 0.7081 1.4596 2.1678 \n",
      "epoch: 81 loss: 0.7111 1.4601 2.1711 \n",
      "epoch: 82 loss: 0.6937 1.4605 2.1542 \n",
      "epoch: 83 loss: 0.6894 1.4609 2.1504 \n",
      "epoch: 84 loss: 0.6788 1.4613 2.1401 \n",
      "epoch: 85 loss: 0.6712 1.4617 2.1329 \n",
      "epoch: 86 loss: 0.6944 1.4622 2.1566 \n",
      "epoch: 87 loss: 0.6716 1.4626 2.1342 \n",
      "epoch: 88 loss: 0.6659 1.4630 2.1289 \n",
      "epoch: 89 loss: 0.6527 1.4634 2.1161 \n",
      "val_accuracy: 0.7689\n",
      "epoch: 90 loss: 0.6594 1.4638 2.1233 \n",
      "epoch: 91 loss: 0.6560 1.4643 2.1203 \n",
      "epoch: 92 loss: 0.6629 1.4647 2.1275 \n",
      "epoch: 93 loss: 0.6593 1.4651 2.1244 \n",
      "epoch: 94 loss: 0.6407 1.4655 2.1062 \n",
      "epoch: 95 loss: 0.6327 1.4659 2.0986 \n",
      "epoch: 96 loss: 0.6390 1.4663 2.1054 \n",
      "epoch: 97 loss: 0.6254 1.4667 2.0921 \n",
      "epoch: 98 loss: 0.6203 1.4672 2.0874 \n",
      "epoch: 99 loss: 0.6289 1.4676 2.0965 \n",
      "val_accuracy: 0.7674\n",
      "epoch: 100 loss: 0.6205 1.4680 2.0885 \n",
      "epoch: 101 loss: 0.6034 1.4684 2.0718 \n",
      "epoch: 102 loss: 0.6170 1.4688 2.0858 \n",
      "epoch: 103 loss: 0.6125 1.4692 2.0817 \n",
      "epoch: 104 loss: 0.5965 1.4696 2.0661 \n",
      "epoch: 105 loss: 0.5929 1.4700 2.0629 \n",
      "epoch: 106 loss: 0.6045 1.4704 2.0750 \n",
      "epoch: 107 loss: 0.6016 1.4709 2.0724 \n",
      "epoch: 108 loss: 0.6008 1.4713 2.0720 \n",
      "epoch: 109 loss: 0.5943 1.4717 2.0659 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 110 loss: 0.5963 1.4721 2.0683 \n",
      "epoch: 111 loss: 0.5878 1.4725 2.0603 \n",
      "epoch: 112 loss: 0.5791 1.4729 2.0520 \n",
      "epoch: 113 loss: 0.5903 1.4733 2.0636 \n",
      "epoch: 114 loss: 0.5684 1.4737 2.0421 \n",
      "epoch: 115 loss: 0.5628 1.4741 2.0369 \n",
      "epoch: 116 loss: 0.5756 1.4745 2.0501 \n",
      "epoch: 117 loss: 0.5493 1.4749 2.0242 \n",
      "epoch: 118 loss: 0.5576 1.4753 2.0329 \n",
      "epoch: 119 loss: 0.5609 1.4757 2.0366 \n",
      "val_accuracy: 0.7719\n",
      "epoch: 120 loss: 0.5696 1.4761 2.0457 \n",
      "epoch: 121 loss: 0.5692 1.4765 2.0457 \n",
      "epoch: 122 loss: 0.5492 1.4769 2.0261 \n",
      "epoch: 123 loss: 0.5355 1.4773 2.0128 \n",
      "epoch: 124 loss: 0.5553 1.4777 2.0330 \n",
      "epoch: 125 loss: 0.5466 1.4781 2.0247 \n",
      "epoch: 126 loss: 0.5426 1.4785 2.0211 \n",
      "epoch: 127 loss: 0.5379 1.4789 2.0168 \n",
      "epoch: 128 loss: 0.5337 1.4793 2.0130 \n",
      "epoch: 129 loss: 0.5405 1.4797 2.0202 \n",
      "val_accuracy: 0.7719\n",
      "epoch: 130 loss: 0.5244 1.4801 2.0045 \n",
      "epoch: 131 loss: 0.5277 1.4805 2.0082 \n",
      "epoch: 132 loss: 0.5212 1.4809 2.0021 \n",
      "epoch: 133 loss: 0.5326 1.4813 2.0139 \n",
      "epoch: 134 loss: 0.5225 1.4817 2.0042 \n",
      "epoch: 135 loss: 0.5205 1.4821 2.0026 \n",
      "epoch: 136 loss: 0.5268 1.4825 2.0093 \n",
      "epoch: 137 loss: 0.5181 1.4829 2.0010 \n",
      "epoch: 138 loss: 0.5078 1.4833 1.9911 \n",
      "epoch: 139 loss: 0.5139 1.4837 1.9976 \n",
      "val_accuracy: 0.7689\n",
      "epoch: 140 loss: 0.5116 1.4840 1.9956 \n",
      "epoch: 141 loss: 0.5178 1.4844 2.0022 \n",
      "epoch: 142 loss: 0.5126 1.4848 1.9974 \n",
      "epoch: 143 loss: 0.5016 1.4852 1.9868 \n",
      "epoch: 144 loss: 0.5151 1.4856 2.0008 \n",
      "epoch: 145 loss: 0.5008 1.4860 1.9868 \n",
      "epoch: 146 loss: 0.5116 1.4864 1.9980 \n",
      "epoch: 147 loss: 0.4967 1.4868 1.9835 \n",
      "epoch: 148 loss: 0.4869 1.4872 1.9741 \n",
      "epoch: 149 loss: 0.5062 1.4876 1.9938 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 150 loss: 0.5003 1.4879 1.9883 \n",
      "epoch: 151 loss: 0.4791 1.4883 1.9674 \n",
      "epoch: 152 loss: 0.4861 1.4887 1.9748 \n",
      "epoch: 153 loss: 0.4805 1.4891 1.9696 \n",
      "epoch: 154 loss: 0.4887 1.4895 1.9782 \n",
      "epoch: 155 loss: 0.4787 1.4899 1.9686 \n",
      "epoch: 156 loss: 0.4841 1.4903 1.9744 \n",
      "epoch: 157 loss: 0.4808 1.4906 1.9714 \n",
      "epoch: 158 loss: 0.4919 1.4910 1.9829 \n",
      "epoch: 159 loss: 0.4833 1.4914 1.9747 \n",
      "val_accuracy: 0.7674\n",
      "epoch: 160 loss: 0.4671 1.4918 1.9589 \n",
      "epoch: 161 loss: 0.4796 1.4922 1.9718 \n",
      "epoch: 162 loss: 0.4682 1.4925 1.9607 \n",
      "epoch: 163 loss: 0.4761 1.4929 1.9690 \n",
      "epoch: 164 loss: 0.4708 1.4933 1.9641 \n",
      "epoch: 165 loss: 0.4644 1.4937 1.9581 \n",
      "epoch: 166 loss: 0.4602 1.4941 1.9543 \n",
      "epoch: 167 loss: 0.4724 1.4944 1.9668 \n",
      "epoch: 168 loss: 0.4623 1.4948 1.9571 \n",
      "epoch: 169 loss: 0.4642 1.4952 1.9594 \n",
      "val_accuracy: 0.7628\n",
      "epoch: 170 loss: 0.4681 1.4956 1.9637 \n",
      "epoch: 171 loss: 0.4565 1.4959 1.9525 \n",
      "epoch: 172 loss: 0.4400 1.4963 1.9363 \n",
      "epoch: 173 loss: 0.4513 1.4967 1.9480 \n",
      "epoch: 174 loss: 0.4503 1.4971 1.9474 \n",
      "epoch: 175 loss: 0.4496 1.4975 1.9470 \n",
      "epoch: 176 loss: 0.4480 1.4978 1.9459 \n",
      "epoch: 177 loss: 0.4494 1.4982 1.9476 \n",
      "epoch: 178 loss: 0.4602 1.4986 1.9588 \n",
      "epoch: 179 loss: 0.4433 1.4989 1.9422 \n",
      "val_accuracy: 0.7628\n",
      "epoch: 180 loss: 0.4334 1.4993 1.9327 \n",
      "epoch: 181 loss: 0.4473 1.4997 1.9470 \n",
      "epoch: 182 loss: 0.4467 1.5001 1.9468 \n",
      "epoch: 183 loss: 0.4380 1.5004 1.9384 \n",
      "epoch: 184 loss: 0.4333 1.5008 1.9341 \n",
      "epoch: 185 loss: 0.4287 1.5012 1.9299 \n",
      "epoch: 186 loss: 0.4414 1.5015 1.9429 \n",
      "epoch: 187 loss: 0.4324 1.5019 1.9343 \n",
      "epoch: 188 loss: 0.4421 1.5023 1.9444 \n",
      "epoch: 189 loss: 0.4351 1.5026 1.9377 \n",
      "val_accuracy: 0.7628\n",
      "epoch: 190 loss: 0.4215 1.5030 1.9245 \n",
      "epoch: 191 loss: 0.4171 1.5034 1.9204 \n",
      "epoch: 192 loss: 0.4335 1.5037 1.9372 \n",
      "epoch: 193 loss: 0.4358 1.5041 1.9399 \n",
      "epoch: 194 loss: 0.4343 1.5045 1.9387 \n",
      "epoch: 195 loss: 0.4217 1.5048 1.9266 \n",
      "epoch: 196 loss: 0.4275 1.5052 1.9327 \n",
      "epoch: 197 loss: 0.4125 1.5056 1.9180 \n",
      "epoch: 198 loss: 0.4237 1.5059 1.9296 \n",
      "epoch: 199 loss: 0.4092 1.5063 1.9155 \n",
      "val_accuracy: 0.7628\n",
      "Parameter containing:\n",
      "tensor([0.8085, 0.8085, 0.8083,  ..., 0.8056, 0.8085, 0.8085], device='cuda:0',\n",
      "       requires_grad=True)\n",
      "Parameter containing:\n",
      "tensor([1., 1., 1.,  ..., 1., 1., 1.], device='cuda:0', requires_grad=True)\n",
      "epoch: 0 loss: 1.7837 1.4243 3.2080 \n",
      "epoch: 1 loss: 1.7663 1.4247 3.1911 \n",
      "epoch: 2 loss: 1.7422 1.4252 3.1674 \n",
      "epoch: 3 loss: 1.7238 1.4257 3.1494 \n",
      "epoch: 4 loss: 1.7034 1.4261 3.1295 \n",
      "epoch: 5 loss: 1.6757 1.4266 3.1023 \n",
      "epoch: 6 loss: 1.6622 1.4270 3.0893 \n",
      "epoch: 7 loss: 1.6346 1.4275 3.0621 \n",
      "epoch: 8 loss: 1.6115 1.4279 3.0395 \n",
      "epoch: 9 loss: 1.5916 1.4284 3.0200 \n",
      "val_accuracy: 0.5891\n",
      "epoch: 10 loss: 1.5618 1.4289 2.9906 \n",
      "epoch: 11 loss: 1.5463 1.4293 2.9756 \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 12 loss: 1.5297 1.4298 2.9595 \n",
      "epoch: 13 loss: 1.5047 1.4302 2.9349 \n",
      "epoch: 14 loss: 1.4722 1.4307 2.9029 \n",
      "epoch: 15 loss: 1.4533 1.4311 2.8844 \n",
      "epoch: 16 loss: 1.4353 1.4316 2.8669 \n",
      "epoch: 17 loss: 1.4134 1.4320 2.8455 \n",
      "epoch: 18 loss: 1.4012 1.4325 2.8337 \n",
      "epoch: 19 loss: 1.3723 1.4329 2.8053 \n",
      "val_accuracy: 0.6299\n",
      "epoch: 20 loss: 1.3627 1.4334 2.7961 \n",
      "epoch: 21 loss: 1.3354 1.4338 2.7693 \n",
      "epoch: 22 loss: 1.3059 1.4343 2.7402 \n",
      "epoch: 23 loss: 1.2999 1.4348 2.7346 \n",
      "epoch: 24 loss: 1.2781 1.4352 2.7133 \n",
      "epoch: 25 loss: 1.2743 1.4357 2.7099 \n",
      "epoch: 26 loss: 1.2390 1.4361 2.6751 \n",
      "epoch: 27 loss: 1.2356 1.4366 2.6721 \n",
      "epoch: 28 loss: 1.2021 1.4370 2.6391 \n",
      "epoch: 29 loss: 1.1976 1.4374 2.6350 \n",
      "val_accuracy: 0.6873\n",
      "epoch: 30 loss: 1.1885 1.4379 2.6264 \n",
      "epoch: 31 loss: 1.1753 1.4383 2.6136 \n",
      "epoch: 32 loss: 1.1589 1.4388 2.5976 \n",
      "epoch: 33 loss: 1.1399 1.4392 2.5791 \n",
      "epoch: 34 loss: 1.1302 1.4397 2.5699 \n",
      "epoch: 35 loss: 1.1117 1.4401 2.5519 \n",
      "epoch: 36 loss: 1.0898 1.4406 2.5303 \n",
      "epoch: 37 loss: 1.0732 1.4410 2.5143 \n",
      "epoch: 38 loss: 1.0693 1.4415 2.5108 \n",
      "epoch: 39 loss: 1.0680 1.4419 2.5099 \n",
      "val_accuracy: 0.7236\n",
      "epoch: 40 loss: 1.0369 1.4423 2.4793 \n",
      "epoch: 41 loss: 1.0300 1.4428 2.4728 \n",
      "epoch: 42 loss: 1.0268 1.4432 2.4701 \n",
      "epoch: 43 loss: 1.0119 1.4437 2.4555 \n",
      "epoch: 44 loss: 1.0190 1.4441 2.4631 \n",
      "epoch: 45 loss: 0.9677 1.4446 2.4122 \n",
      "epoch: 46 loss: 0.9849 1.4450 2.4299 \n",
      "epoch: 47 loss: 0.9486 1.4454 2.3940 \n",
      "epoch: 48 loss: 0.9511 1.4459 2.3970 \n",
      "epoch: 49 loss: 0.9386 1.4463 2.3849 \n",
      "val_accuracy: 0.7538\n",
      "epoch: 50 loss: 0.9309 1.4467 2.3777 \n",
      "epoch: 51 loss: 0.9210 1.4472 2.3682 \n",
      "epoch: 52 loss: 0.9212 1.4476 2.3689 \n",
      "epoch: 53 loss: 0.8892 1.4481 2.3372 \n",
      "epoch: 54 loss: 0.8843 1.4485 2.3328 \n",
      "epoch: 55 loss: 0.8947 1.4489 2.3436 \n",
      "epoch: 56 loss: 0.8702 1.4494 2.3195 \n",
      "epoch: 57 loss: 0.8680 1.4498 2.3178 \n",
      "epoch: 58 loss: 0.8657 1.4502 2.3160 \n",
      "epoch: 59 loss: 0.8633 1.4507 2.3139 \n",
      "val_accuracy: 0.7598\n",
      "epoch: 60 loss: 0.8584 1.4511 2.3095 \n",
      "epoch: 61 loss: 0.8334 1.4515 2.2849 \n",
      "epoch: 62 loss: 0.8401 1.4520 2.2921 \n",
      "epoch: 63 loss: 0.8251 1.4524 2.2775 \n",
      "epoch: 64 loss: 0.8248 1.4528 2.2776 \n",
      "epoch: 65 loss: 0.8071 1.4533 2.2604 \n",
      "epoch: 66 loss: 0.8078 1.4537 2.2614 \n",
      "epoch: 67 loss: 0.8170 1.4541 2.2711 \n",
      "epoch: 68 loss: 0.7834 1.4545 2.2379 \n",
      "epoch: 69 loss: 0.7803 1.4550 2.2353 \n",
      "val_accuracy: 0.7674\n",
      "epoch: 70 loss: 0.7781 1.4554 2.2335 \n",
      "epoch: 71 loss: 0.7641 1.4558 2.2200 \n",
      "epoch: 72 loss: 0.7607 1.4563 2.2169 \n",
      "epoch: 73 loss: 0.7578 1.4567 2.2145 \n",
      "epoch: 74 loss: 0.7674 1.4571 2.2245 \n",
      "epoch: 75 loss: 0.7554 1.4575 2.2129 \n",
      "epoch: 76 loss: 0.7389 1.4580 2.1968 \n",
      "epoch: 77 loss: 0.7292 1.4584 2.1876 \n",
      "epoch: 78 loss: 0.7238 1.4588 2.1826 \n",
      "epoch: 79 loss: 0.7236 1.4592 2.1829 \n",
      "val_accuracy: 0.7719\n",
      "epoch: 80 loss: 0.7057 1.4596 2.1653 \n",
      "epoch: 81 loss: 0.7119 1.4601 2.1720 \n",
      "epoch: 82 loss: 0.7124 1.4605 2.1729 \n",
      "epoch: 83 loss: 0.6891 1.4609 2.1500 \n",
      "epoch: 84 loss: 0.6983 1.4613 2.1596 \n",
      "epoch: 85 loss: 0.6950 1.4617 2.1568 \n",
      "epoch: 86 loss: 0.6839 1.4622 2.1461 \n",
      "epoch: 87 loss: 0.6729 1.4626 2.1355 \n",
      "epoch: 88 loss: 0.6652 1.4630 2.1282 \n",
      "epoch: 89 loss: 0.6907 1.4634 2.1542 \n",
      "val_accuracy: 0.7734\n",
      "epoch: 90 loss: 0.6722 1.4638 2.1360 \n",
      "epoch: 91 loss: 0.6634 1.4643 2.1277 \n",
      "epoch: 92 loss: 0.6535 1.4647 2.1182 \n",
      "epoch: 93 loss: 0.6561 1.4651 2.1212 \n",
      "epoch: 94 loss: 0.6552 1.4655 2.1207 \n",
      "epoch: 95 loss: 0.6429 1.4659 2.1088 \n",
      "epoch: 96 loss: 0.6404 1.4663 2.1067 \n",
      "epoch: 97 loss: 0.6315 1.4667 2.0983 \n",
      "epoch: 98 loss: 0.6260 1.4672 2.0932 \n",
      "epoch: 99 loss: 0.6395 1.4676 2.1071 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 100 loss: 0.6283 1.4680 2.0962 \n",
      "epoch: 101 loss: 0.6078 1.4684 2.0762 \n",
      "epoch: 102 loss: 0.6072 1.4688 2.0760 \n",
      "epoch: 103 loss: 0.6188 1.4692 2.0880 \n",
      "epoch: 104 loss: 0.6146 1.4696 2.0843 \n",
      "epoch: 105 loss: 0.6096 1.4700 2.0796 \n",
      "epoch: 106 loss: 0.6023 1.4704 2.0728 \n",
      "epoch: 107 loss: 0.5974 1.4709 2.0683 \n",
      "epoch: 108 loss: 0.5891 1.4713 2.0604 \n",
      "epoch: 109 loss: 0.5912 1.4717 2.0629 \n",
      "val_accuracy: 0.7689\n",
      "epoch: 110 loss: 0.5892 1.4721 2.0613 \n",
      "epoch: 111 loss: 0.5881 1.4725 2.0606 \n",
      "epoch: 112 loss: 0.5996 1.4729 2.0725 \n",
      "epoch: 113 loss: 0.5725 1.4733 2.0458 \n",
      "epoch: 114 loss: 0.5761 1.4737 2.0498 \n",
      "epoch: 115 loss: 0.5703 1.4741 2.0444 \n",
      "epoch: 116 loss: 0.5752 1.4745 2.0497 \n",
      "epoch: 117 loss: 0.5830 1.4749 2.0579 \n",
      "epoch: 118 loss: 0.5703 1.4753 2.0456 \n",
      "epoch: 119 loss: 0.5622 1.4757 2.0379 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 120 loss: 0.5573 1.4761 2.0334 \n",
      "epoch: 121 loss: 0.5679 1.4765 2.0444 \n",
      "epoch: 122 loss: 0.5749 1.4769 2.0519 \n",
      "epoch: 123 loss: 0.5481 1.4773 2.0254 \n",
      "epoch: 124 loss: 0.5474 1.4777 2.0252 \n",
      "epoch: 125 loss: 0.5461 1.4781 2.0242 \n",
      "epoch: 126 loss: 0.5507 1.4785 2.0292 \n",
      "epoch: 127 loss: 0.5514 1.4789 2.0303 \n",
      "epoch: 128 loss: 0.5383 1.4793 2.0176 \n",
      "epoch: 129 loss: 0.5367 1.4797 2.0164 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 130 loss: 0.5324 1.4801 2.0125 \n",
      "epoch: 131 loss: 0.5294 1.4805 2.0099 \n",
      "epoch: 132 loss: 0.5402 1.4809 2.0211 \n",
      "epoch: 133 loss: 0.5242 1.4813 2.0055 \n",
      "epoch: 134 loss: 0.5156 1.4817 1.9973 \n",
      "epoch: 135 loss: 0.5202 1.4821 2.0022 \n",
      "epoch: 136 loss: 0.5242 1.4825 2.0067 \n",
      "epoch: 137 loss: 0.5175 1.4829 2.0004 \n",
      "epoch: 138 loss: 0.5262 1.4833 2.0095 \n",
      "epoch: 139 loss: 0.5204 1.4837 2.0040 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 140 loss: 0.4979 1.4840 1.9820 \n",
      "epoch: 141 loss: 0.5188 1.4844 2.0032 \n",
      "epoch: 142 loss: 0.5121 1.4848 1.9969 \n",
      "epoch: 143 loss: 0.4921 1.4852 1.9773 \n",
      "epoch: 144 loss: 0.5093 1.4856 1.9949 \n",
      "epoch: 145 loss: 0.5048 1.4860 1.9908 \n",
      "epoch: 146 loss: 0.4887 1.4864 1.9751 \n",
      "epoch: 147 loss: 0.4904 1.4868 1.9772 \n",
      "epoch: 148 loss: 0.4998 1.4872 1.9870 \n",
      "epoch: 149 loss: 0.4989 1.4876 1.9864 \n",
      "val_accuracy: 0.7644\n",
      "epoch: 150 loss: 0.4837 1.4879 1.9716 \n",
      "epoch: 151 loss: 0.4794 1.4883 1.9677 \n",
      "epoch: 152 loss: 0.5053 1.4887 1.9940 \n",
      "epoch: 153 loss: 0.4918 1.4891 1.9808 \n",
      "epoch: 154 loss: 0.4704 1.4895 1.9598 \n",
      "epoch: 155 loss: 0.4814 1.4899 1.9713 \n",
      "epoch: 156 loss: 0.4726 1.4903 1.9629 \n",
      "epoch: 157 loss: 0.4919 1.4906 1.9825 \n",
      "epoch: 158 loss: 0.4583 1.4910 1.9493 \n",
      "epoch: 159 loss: 0.4607 1.4914 1.9521 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 160 loss: 0.4745 1.4918 1.9663 \n",
      "epoch: 161 loss: 0.4637 1.4922 1.9558 \n",
      "epoch: 162 loss: 0.4721 1.4925 1.9646 \n",
      "epoch: 163 loss: 0.4686 1.4929 1.9615 \n",
      "epoch: 164 loss: 0.4681 1.4933 1.9614 \n",
      "epoch: 165 loss: 0.4593 1.4937 1.9530 \n",
      "epoch: 166 loss: 0.4575 1.4941 1.9516 \n",
      "epoch: 167 loss: 0.4574 1.4944 1.9518 \n",
      "epoch: 168 loss: 0.4610 1.4948 1.9559 \n",
      "epoch: 169 loss: 0.4528 1.4952 1.9480 \n",
      "val_accuracy: 0.7598\n",
      "epoch: 170 loss: 0.4655 1.4956 1.9610 \n",
      "epoch: 171 loss: 0.4539 1.4959 1.9498 \n",
      "epoch: 172 loss: 0.4431 1.4963 1.9395 \n",
      "epoch: 173 loss: 0.4478 1.4967 1.9445 \n",
      "epoch: 174 loss: 0.4588 1.4971 1.9558 \n",
      "epoch: 175 loss: 0.4471 1.4975 1.9446 \n",
      "epoch: 176 loss: 0.4415 1.4978 1.9394 \n",
      "epoch: 177 loss: 0.4485 1.4982 1.9467 \n",
      "epoch: 178 loss: 0.4396 1.4986 1.9382 \n",
      "epoch: 179 loss: 0.4563 1.4989 1.9552 \n",
      "val_accuracy: 0.7538\n",
      "epoch: 180 loss: 0.4461 1.4993 1.9454 \n",
      "epoch: 181 loss: 0.4348 1.4997 1.9345 \n",
      "epoch: 182 loss: 0.4447 1.5001 1.9447 \n",
      "epoch: 183 loss: 0.4452 1.5004 1.9456 \n",
      "epoch: 184 loss: 0.4452 1.5008 1.9460 \n",
      "epoch: 185 loss: 0.4504 1.5012 1.9516 \n",
      "epoch: 186 loss: 0.4275 1.5015 1.9291 \n",
      "epoch: 187 loss: 0.4328 1.5019 1.9347 \n",
      "epoch: 188 loss: 0.4281 1.5023 1.9303 \n",
      "epoch: 189 loss: 0.4324 1.5026 1.9350 \n",
      "val_accuracy: 0.7538\n",
      "epoch: 190 loss: 0.4243 1.5030 1.9273 \n",
      "epoch: 191 loss: 0.4324 1.5034 1.9358 \n",
      "epoch: 192 loss: 0.4316 1.5037 1.9354 \n",
      "epoch: 193 loss: 0.4224 1.5041 1.9265 \n",
      "epoch: 194 loss: 0.4248 1.5045 1.9292 \n",
      "epoch: 195 loss: 0.4168 1.5048 1.9216 \n",
      "epoch: 196 loss: 0.4190 1.5052 1.9242 \n",
      "epoch: 197 loss: 0.4208 1.5056 1.9264 \n",
      "epoch: 198 loss: 0.4168 1.5059 1.9228 \n",
      "epoch: 199 loss: 0.4191 1.5063 1.9254 \n",
      "val_accuracy: 0.7523\n",
      "Parameter containing:\n",
      "tensor([0.8085, 0.8085, 0.8083,  ..., 0.8056, 0.8085, 0.8085], device='cuda:0',\n",
      "       requires_grad=True)\n",
      "Parameter containing:\n",
      "tensor([1., 1., 1.,  ..., 1., 1., 1.], device='cuda:0', requires_grad=True)\n",
      "epoch: 0 loss: 1.8008 1.4243 3.2251 \n",
      "epoch: 1 loss: 1.7803 1.4247 3.2050 \n",
      "epoch: 2 loss: 1.7638 1.4252 3.1891 \n",
      "epoch: 3 loss: 1.7463 1.4257 3.1720 \n",
      "epoch: 4 loss: 1.7279 1.4261 3.1540 \n",
      "epoch: 5 loss: 1.7076 1.4266 3.1341 \n",
      "epoch: 6 loss: 1.6864 1.4270 3.1134 \n",
      "epoch: 7 loss: 1.6632 1.4275 3.0907 \n",
      "epoch: 8 loss: 1.6377 1.4279 3.0657 \n",
      "epoch: 9 loss: 1.6148 1.4284 3.0432 \n",
      "val_accuracy: 0.6435\n",
      "epoch: 10 loss: 1.5918 1.4289 3.0207 \n",
      "epoch: 11 loss: 1.5683 1.4293 2.9976 \n",
      "epoch: 12 loss: 1.5405 1.4298 2.9702 \n",
      "epoch: 13 loss: 1.5142 1.4302 2.9444 \n",
      "epoch: 14 loss: 1.4942 1.4307 2.9248 \n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 15 loss: 1.4800 1.4311 2.9111 \n",
      "epoch: 16 loss: 1.4449 1.4316 2.8765 \n",
      "epoch: 17 loss: 1.4314 1.4320 2.8634 \n",
      "epoch: 18 loss: 1.4057 1.4325 2.8382 \n",
      "epoch: 19 loss: 1.3887 1.4329 2.8216 \n",
      "val_accuracy: 0.6964\n",
      "epoch: 20 loss: 1.3559 1.4334 2.7893 \n",
      "epoch: 21 loss: 1.3414 1.4338 2.7753 \n",
      "epoch: 22 loss: 1.3218 1.4343 2.7561 \n",
      "epoch: 23 loss: 1.3100 1.4348 2.7448 \n",
      "epoch: 24 loss: 1.2810 1.4352 2.7162 \n",
      "epoch: 25 loss: 1.2668 1.4357 2.7025 \n",
      "epoch: 26 loss: 1.2378 1.4361 2.6739 \n",
      "epoch: 27 loss: 1.2340 1.4366 2.6706 \n",
      "epoch: 28 loss: 1.2141 1.4370 2.6511 \n",
      "epoch: 29 loss: 1.1964 1.4374 2.6338 \n",
      "val_accuracy: 0.7372\n",
      "epoch: 30 loss: 1.1702 1.4379 2.6081 \n",
      "epoch: 31 loss: 1.1602 1.4383 2.5986 \n",
      "epoch: 32 loss: 1.1461 1.4388 2.5849 \n",
      "epoch: 33 loss: 1.1444 1.4392 2.5837 \n",
      "epoch: 34 loss: 1.1183 1.4397 2.5580 \n",
      "epoch: 35 loss: 1.0918 1.4401 2.5320 \n",
      "epoch: 36 loss: 1.0811 1.4406 2.5216 \n",
      "epoch: 37 loss: 1.0749 1.4410 2.5159 \n",
      "epoch: 38 loss: 1.0619 1.4415 2.5034 \n",
      "epoch: 39 loss: 1.0431 1.4419 2.4850 \n",
      "val_accuracy: 0.7538\n",
      "epoch: 40 loss: 1.0456 1.4423 2.4879 \n",
      "epoch: 41 loss: 1.0231 1.4428 2.4659 \n",
      "epoch: 42 loss: 1.0130 1.4432 2.4562 \n",
      "epoch: 43 loss: 1.0019 1.4437 2.4456 \n",
      "epoch: 44 loss: 0.9946 1.4441 2.4387 \n",
      "epoch: 45 loss: 0.9699 1.4446 2.4145 \n",
      "epoch: 46 loss: 0.9619 1.4450 2.4069 \n",
      "epoch: 47 loss: 0.9497 1.4454 2.3951 \n",
      "epoch: 48 loss: 0.9447 1.4459 2.3906 \n",
      "epoch: 49 loss: 0.9295 1.4463 2.3758 \n",
      "val_accuracy: 0.7628\n",
      "epoch: 50 loss: 0.9308 1.4467 2.3775 \n",
      "epoch: 51 loss: 0.9171 1.4472 2.3643 \n",
      "epoch: 52 loss: 0.9163 1.4476 2.3639 \n",
      "epoch: 53 loss: 0.8939 1.4481 2.3420 \n",
      "epoch: 54 loss: 0.8862 1.4485 2.3347 \n",
      "epoch: 55 loss: 0.8794 1.4489 2.3283 \n",
      "epoch: 56 loss: 0.8782 1.4494 2.3276 \n",
      "epoch: 57 loss: 0.8551 1.4498 2.3049 \n",
      "epoch: 58 loss: 0.8482 1.4502 2.2984 \n",
      "epoch: 59 loss: 0.8359 1.4507 2.2866 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 60 loss: 0.8409 1.4511 2.2920 \n",
      "epoch: 61 loss: 0.8317 1.4515 2.2833 \n",
      "epoch: 62 loss: 0.8230 1.4520 2.2749 \n",
      "epoch: 63 loss: 0.8202 1.4524 2.2726 \n",
      "epoch: 64 loss: 0.8123 1.4528 2.2651 \n",
      "epoch: 65 loss: 0.7897 1.4533 2.2429 \n",
      "epoch: 66 loss: 0.7838 1.4537 2.2375 \n",
      "epoch: 67 loss: 0.7896 1.4541 2.2438 \n",
      "epoch: 68 loss: 0.7902 1.4545 2.2447 \n",
      "epoch: 69 loss: 0.7811 1.4550 2.2361 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 70 loss: 0.7463 1.4554 2.2017 \n",
      "epoch: 71 loss: 0.7569 1.4558 2.2127 \n",
      "epoch: 72 loss: 0.7343 1.4563 2.1906 \n",
      "epoch: 73 loss: 0.7373 1.4567 2.1940 \n",
      "epoch: 74 loss: 0.7397 1.4571 2.1968 \n",
      "epoch: 75 loss: 0.7411 1.4575 2.1986 \n",
      "epoch: 76 loss: 0.7325 1.4580 2.1905 \n",
      "epoch: 77 loss: 0.7296 1.4584 2.1879 \n",
      "epoch: 78 loss: 0.7165 1.4588 2.1753 \n",
      "epoch: 79 loss: 0.7106 1.4592 2.1698 \n",
      "val_accuracy: 0.7613\n",
      "epoch: 80 loss: 0.7354 1.4596 2.1951 \n",
      "epoch: 81 loss: 0.7285 1.4601 2.1885 \n",
      "epoch: 82 loss: 0.6925 1.4605 2.1530 \n",
      "epoch: 83 loss: 0.6855 1.4609 2.1464 \n",
      "epoch: 84 loss: 0.6870 1.4613 2.1483 \n",
      "epoch: 85 loss: 0.6885 1.4617 2.1502 \n",
      "epoch: 86 loss: 0.6790 1.4622 2.1412 \n",
      "epoch: 87 loss: 0.6841 1.4626 2.1467 \n",
      "epoch: 88 loss: 0.6664 1.4630 2.1294 \n",
      "epoch: 89 loss: 0.6783 1.4634 2.1417 \n",
      "val_accuracy: 0.7644\n",
      "epoch: 90 loss: 0.6569 1.4638 2.1207 \n",
      "epoch: 91 loss: 0.6579 1.4643 2.1222 \n",
      "epoch: 92 loss: 0.6664 1.4647 2.1311 \n",
      "epoch: 93 loss: 0.6599 1.4651 2.1249 \n",
      "epoch: 94 loss: 0.6482 1.4655 2.1137 \n",
      "epoch: 95 loss: 0.6523 1.4659 2.1182 \n",
      "epoch: 96 loss: 0.6456 1.4663 2.1120 \n",
      "epoch: 97 loss: 0.6327 1.4667 2.0995 \n",
      "epoch: 98 loss: 0.6466 1.4672 2.1138 \n",
      "epoch: 99 loss: 0.6335 1.4676 2.1010 \n",
      "val_accuracy: 0.7734\n",
      "epoch: 100 loss: 0.6379 1.4680 2.1059 \n",
      "epoch: 101 loss: 0.6254 1.4684 2.0938 \n",
      "epoch: 102 loss: 0.6201 1.4688 2.0889 \n",
      "epoch: 103 loss: 0.6284 1.4692 2.0976 \n",
      "epoch: 104 loss: 0.6271 1.4696 2.0967 \n",
      "epoch: 105 loss: 0.6040 1.4700 2.0740 \n",
      "epoch: 106 loss: 0.6044 1.4704 2.0748 \n",
      "epoch: 107 loss: 0.6060 1.4709 2.0768 \n",
      "epoch: 108 loss: 0.6013 1.4713 2.0726 \n",
      "epoch: 109 loss: 0.5925 1.4717 2.0642 \n",
      "val_accuracy: 0.7674\n",
      "epoch: 110 loss: 0.6044 1.4721 2.0765 \n",
      "epoch: 111 loss: 0.5914 1.4725 2.0639 \n",
      "epoch: 112 loss: 0.5972 1.4729 2.0700 \n",
      "epoch: 113 loss: 0.5869 1.4733 2.0602 \n",
      "epoch: 114 loss: 0.5912 1.4737 2.0649 \n",
      "epoch: 115 loss: 0.5897 1.4741 2.0638 \n",
      "epoch: 116 loss: 0.5799 1.4745 2.0544 \n",
      "epoch: 117 loss: 0.5569 1.4749 2.0318 \n",
      "epoch: 118 loss: 0.5716 1.4753 2.0470 \n",
      "epoch: 119 loss: 0.5644 1.4757 2.0401 \n",
      "val_accuracy: 0.7704\n",
      "epoch: 120 loss: 0.5750 1.4761 2.0511 \n",
      "epoch: 121 loss: 0.5761 1.4765 2.0526 \n",
      "epoch: 122 loss: 0.5613 1.4769 2.0383 \n",
      "epoch: 123 loss: 0.5652 1.4773 2.0426 \n",
      "epoch: 124 loss: 0.5579 1.4777 2.0356 \n",
      "epoch: 125 loss: 0.5551 1.4781 2.0332 \n",
      "epoch: 126 loss: 0.5566 1.4785 2.0351 \n",
      "epoch: 127 loss: 0.5502 1.4789 2.0291 \n",
      "epoch: 128 loss: 0.5417 1.4793 2.0211 \n",
      "epoch: 129 loss: 0.5604 1.4797 2.0401 \n",
      "val_accuracy: 0.7674\n",
      "epoch: 130 loss: 0.5297 1.4801 2.0098 \n",
      "epoch: 131 loss: 0.5487 1.4805 2.0292 \n",
      "epoch: 132 loss: 0.5422 1.4809 2.0231 \n",
      "epoch: 133 loss: 0.5365 1.4813 2.0178 \n",
      "epoch: 134 loss: 0.5398 1.4817 2.0215 \n",
      "epoch: 135 loss: 0.5271 1.4821 2.0092 \n",
      "epoch: 136 loss: 0.5205 1.4825 2.0030 \n",
      "epoch: 137 loss: 0.5219 1.4829 2.0048 \n",
      "epoch: 138 loss: 0.5352 1.4833 2.0184 \n",
      "epoch: 139 loss: 0.5033 1.4837 1.9870 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 140 loss: 0.5170 1.4840 2.0011 \n",
      "epoch: 141 loss: 0.5178 1.4844 2.0022 \n",
      "epoch: 142 loss: 0.5184 1.4848 2.0033 \n",
      "epoch: 143 loss: 0.5064 1.4852 1.9916 \n",
      "epoch: 144 loss: 0.5108 1.4856 1.9964 \n",
      "epoch: 145 loss: 0.5146 1.4860 2.0006 \n",
      "epoch: 146 loss: 0.4961 1.4864 1.9825 \n",
      "epoch: 147 loss: 0.5012 1.4868 1.9880 \n",
      "epoch: 148 loss: 0.4932 1.4872 1.9804 \n",
      "epoch: 149 loss: 0.4922 1.4876 1.9798 \n",
      "val_accuracy: 0.7644\n",
      "epoch: 150 loss: 0.4947 1.4879 1.9827 \n",
      "epoch: 151 loss: 0.4912 1.4883 1.9796 \n",
      "epoch: 152 loss: 0.4895 1.4887 1.9782 \n",
      "epoch: 153 loss: 0.4819 1.4891 1.9710 \n",
      "epoch: 154 loss: 0.5053 1.4895 1.9948 \n",
      "epoch: 155 loss: 0.4894 1.4899 1.9793 \n",
      "epoch: 156 loss: 0.4801 1.4903 1.9704 \n",
      "epoch: 157 loss: 0.4918 1.4906 1.9825 \n",
      "epoch: 158 loss: 0.4777 1.4910 1.9687 \n",
      "epoch: 159 loss: 0.4972 1.4914 1.9886 \n",
      "val_accuracy: 0.7644\n",
      "epoch: 160 loss: 0.4685 1.4918 1.9603 \n",
      "epoch: 161 loss: 0.4953 1.4922 1.9875 \n",
      "epoch: 162 loss: 0.4738 1.4925 1.9663 \n",
      "epoch: 163 loss: 0.4709 1.4929 1.9638 \n",
      "epoch: 164 loss: 0.4709 1.4933 1.9642 \n",
      "epoch: 165 loss: 0.4826 1.4937 1.9763 \n",
      "epoch: 166 loss: 0.4741 1.4941 1.9681 \n",
      "epoch: 167 loss: 0.4649 1.4944 1.9594 \n",
      "epoch: 168 loss: 0.4653 1.4948 1.9601 \n",
      "epoch: 169 loss: 0.4747 1.4952 1.9699 \n",
      "val_accuracy: 0.7659\n",
      "epoch: 170 loss: 0.4731 1.4956 1.9687 \n",
      "epoch: 171 loss: 0.4755 1.4959 1.9715 \n",
      "epoch: 172 loss: 0.4427 1.4963 1.9390 \n",
      "epoch: 173 loss: 0.4643 1.4967 1.9610 \n",
      "epoch: 174 loss: 0.4584 1.4971 1.9555 \n",
      "epoch: 175 loss: 0.4613 1.4975 1.9588 \n",
      "epoch: 176 loss: 0.4468 1.4978 1.9446 \n",
      "epoch: 177 loss: 0.4524 1.4982 1.9506 \n",
      "epoch: 178 loss: 0.4591 1.4986 1.9577 \n",
      "epoch: 179 loss: 0.4441 1.4989 1.9430 \n",
      "val_accuracy: 0.7613\n",
      "epoch: 180 loss: 0.4530 1.4993 1.9524 \n",
      "epoch: 181 loss: 0.4562 1.4997 1.9559 \n",
      "epoch: 182 loss: 0.4533 1.5001 1.9534 \n",
      "epoch: 183 loss: 0.4407 1.5004 1.9411 \n",
      "epoch: 184 loss: 0.4472 1.5008 1.9480 \n",
      "epoch: 185 loss: 0.4411 1.5012 1.9423 \n",
      "epoch: 186 loss: 0.4515 1.5015 1.9531 \n",
      "epoch: 187 loss: 0.4392 1.5019 1.9411 \n",
      "epoch: 188 loss: 0.4476 1.5023 1.9499 \n",
      "epoch: 189 loss: 0.4218 1.5026 1.9244 \n",
      "val_accuracy: 0.7598\n",
      "epoch: 190 loss: 0.4304 1.5030 1.9334 \n",
      "epoch: 191 loss: 0.4492 1.5034 1.9526 \n",
      "epoch: 192 loss: 0.4403 1.5037 1.9440 \n",
      "epoch: 193 loss: 0.4341 1.5041 1.9382 \n",
      "epoch: 194 loss: 0.4231 1.5045 1.9276 \n",
      "epoch: 195 loss: 0.4371 1.5048 1.9420 \n",
      "epoch: 196 loss: 0.4226 1.5052 1.9278 \n",
      "epoch: 197 loss: 0.4255 1.5056 1.9310 \n",
      "epoch: 198 loss: 0.4251 1.5059 1.9310 \n",
      "epoch: 199 loss: 0.4340 1.5063 1.9403 \n",
      "val_accuracy: 0.7583\n",
      "Parameter containing:\n",
      "tensor([0.8085, 0.8085, 0.8083,  ..., 0.8056, 0.8085, 0.8085], device='cuda:0',\n",
      "       requires_grad=True)\n",
      "mean_accuracy: 0.7748\n"
     ]
    }
   ],
   "source": [
    "accuracy = []\n",
    "\n",
    "device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')\n",
    "\n",
    "for i in range(3):\n",
    "    model = GCN_LPA(data.x.shape[1], data.edge_index.shape[1],len(label_to_index)).to(device)\n",
    "    gamma = 1\n",
    "    \n",
    "    print(model.LPA.edge_weight)\n",
    "\n",
    "    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3, weight_decay=5e-4)\n",
    "\n",
    "    max_val_acc = 0\n",
    "    text_val_acc = 0\n",
    "    \n",
    "    model.train()\n",
    "    for epoch in range(200):\n",
    "        optimizer.zero_grad()\n",
    "        out1, out2 = model(data, train_mask)\n",
    "        loss1 = F.nll_loss(out1[train_mask], data.y[train_mask])\n",
    "        loss2 = F.nll_loss(out2[train_mask], data.y[train_mask])\n",
    "\n",
    "        loss = loss1 + gamma * loss2\n",
    "\n",
    "        print('epoch: %d loss: %.4f %.4f %.4f ' %(epoch, loss1, loss2, loss))\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        if((epoch + 1)% 10 == 0):\n",
    "            model.eval()\n",
    "            out1, out2 = model(data, train_mask)\n",
    "            _, pred = out1.max(dim=1)\n",
    "            correct = int(pred[val_mask].eq(data.y[val_mask]).sum().item())\n",
    "            acc = correct / len(val_mask)\n",
    "            \n",
    "            if(acc > max_val_acc):\n",
    "                out1, out2 = model(data, train_mask)\n",
    "                _, pred = out1.max(dim=1)\n",
    "                correct = int(pred[test_mask].eq(data.y[test_mask]).sum().item())\n",
    "                text_val_acc = correct / len(test_mask)\n",
    "            print('val_accuracy: {:.4f}'.format(acc))\n",
    "            model.train()\n",
    "    accuracy.append(text_val_acc)\n",
    "    print(model.LPA.edge_weight)\n",
    "print(\"mean_accuracy: %.4f\" % (sum(accuracy)/len(accuracy)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {
    "ExecuteTime": {
     "end_time": "2021-06-21T06:33:38.190730Z",
     "start_time": "2021-06-21T06:33:35.892169Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0 loss: 1.8035\n",
      "epoch: 1 loss: 1.7868\n",
      "epoch: 2 loss: 1.7668\n",
      "epoch: 3 loss: 1.7519\n",
      "epoch: 4 loss: 1.7357\n",
      "epoch: 5 loss: 1.7193\n",
      "epoch: 6 loss: 1.7033\n",
      "epoch: 7 loss: 1.6828\n",
      "epoch: 8 loss: 1.6650\n",
      "epoch: 9 loss: 1.6422\n",
      "val_accuracy: 0.5151\n",
      "epoch: 10 loss: 1.6232\n",
      "epoch: 11 loss: 1.6034\n",
      "epoch: 12 loss: 1.5786\n",
      "epoch: 13 loss: 1.5569\n",
      "epoch: 14 loss: 1.5357\n",
      "epoch: 15 loss: 1.5108\n",
      "epoch: 16 loss: 1.4941\n",
      "epoch: 17 loss: 1.4768\n",
      "epoch: 18 loss: 1.4490\n",
      "epoch: 19 loss: 1.4259\n",
      "val_accuracy: 0.5876\n",
      "epoch: 20 loss: 1.4195\n",
      "epoch: 21 loss: 1.3941\n",
      "epoch: 22 loss: 1.3696\n",
      "epoch: 23 loss: 1.3470\n",
      "epoch: 24 loss: 1.3349\n",
      "epoch: 25 loss: 1.3192\n",
      "epoch: 26 loss: 1.2923\n",
      "epoch: 27 loss: 1.2805\n",
      "epoch: 28 loss: 1.2621\n",
      "epoch: 29 loss: 1.2452\n",
      "val_accuracy: 0.7160\n",
      "epoch: 30 loss: 1.2264\n",
      "epoch: 31 loss: 1.2073\n",
      "epoch: 32 loss: 1.1976\n",
      "epoch: 33 loss: 1.1840\n",
      "epoch: 34 loss: 1.1556\n",
      "epoch: 35 loss: 1.1498\n",
      "epoch: 36 loss: 1.1249\n",
      "epoch: 37 loss: 1.1190\n",
      "epoch: 38 loss: 1.0933\n",
      "epoch: 39 loss: 1.0878\n",
      "val_accuracy: 0.7538\n",
      "epoch: 40 loss: 1.0713\n",
      "epoch: 41 loss: 1.0486\n",
      "epoch: 42 loss: 1.0489\n",
      "epoch: 43 loss: 1.0338\n",
      "epoch: 44 loss: 1.0341\n",
      "epoch: 45 loss: 1.0122\n",
      "epoch: 46 loss: 1.0098\n",
      "epoch: 47 loss: 0.9856\n",
      "epoch: 48 loss: 0.9606\n",
      "epoch: 49 loss: 0.9508\n",
      "val_accuracy: 0.7644\n",
      "epoch: 50 loss: 0.9499\n",
      "epoch: 51 loss: 0.9327\n",
      "epoch: 52 loss: 0.9190\n",
      "epoch: 53 loss: 0.9161\n",
      "epoch: 54 loss: 0.8947\n",
      "epoch: 55 loss: 0.8992\n",
      "epoch: 56 loss: 0.8978\n",
      "epoch: 57 loss: 0.8837\n",
      "epoch: 58 loss: 0.8672\n",
      "epoch: 59 loss: 0.8596\n",
      "val_accuracy: 0.7734\n",
      "epoch: 60 loss: 0.8514\n",
      "epoch: 61 loss: 0.8289\n",
      "epoch: 62 loss: 0.8240\n",
      "epoch: 63 loss: 0.8000\n",
      "epoch: 64 loss: 0.8116\n",
      "epoch: 65 loss: 0.8053\n",
      "epoch: 66 loss: 0.8177\n",
      "epoch: 67 loss: 0.7949\n",
      "epoch: 68 loss: 0.7907\n",
      "epoch: 69 loss: 0.7793\n",
      "val_accuracy: 0.7795\n",
      "epoch: 70 loss: 0.7635\n",
      "epoch: 71 loss: 0.7497\n",
      "epoch: 72 loss: 0.7425\n",
      "epoch: 73 loss: 0.7487\n",
      "epoch: 74 loss: 0.7368\n",
      "epoch: 75 loss: 0.7371\n",
      "epoch: 76 loss: 0.7151\n",
      "epoch: 77 loss: 0.7298\n",
      "epoch: 78 loss: 0.7156\n",
      "epoch: 79 loss: 0.7201\n",
      "val_accuracy: 0.7840\n",
      "epoch: 80 loss: 0.7186\n",
      "epoch: 81 loss: 0.7041\n",
      "epoch: 82 loss: 0.6953\n",
      "epoch: 83 loss: 0.7003\n",
      "epoch: 84 loss: 0.6836\n",
      "epoch: 85 loss: 0.6919\n",
      "epoch: 86 loss: 0.6797\n",
      "epoch: 87 loss: 0.6802\n",
      "epoch: 88 loss: 0.6887\n",
      "epoch: 89 loss: 0.6788\n",
      "val_accuracy: 0.7795\n",
      "epoch: 90 loss: 0.6632\n",
      "epoch: 91 loss: 0.6498\n",
      "epoch: 92 loss: 0.6388\n",
      "epoch: 93 loss: 0.6617\n",
      "epoch: 94 loss: 0.6372\n",
      "epoch: 95 loss: 0.6484\n",
      "epoch: 96 loss: 0.6279\n",
      "epoch: 97 loss: 0.6418\n",
      "epoch: 98 loss: 0.6352\n",
      "epoch: 99 loss: 0.6163\n",
      "val_accuracy: 0.7749\n",
      "epoch: 100 loss: 0.6167\n",
      "epoch: 101 loss: 0.6208\n",
      "epoch: 102 loss: 0.6072\n",
      "epoch: 103 loss: 0.6002\n",
      "epoch: 104 loss: 0.6200\n",
      "epoch: 105 loss: 0.6029\n",
      "epoch: 106 loss: 0.5934\n",
      "epoch: 107 loss: 0.6034\n",
      "epoch: 108 loss: 0.5962\n",
      "epoch: 109 loss: 0.5803\n",
      "val_accuracy: 0.7719\n",
      "epoch: 110 loss: 0.5809\n",
      "epoch: 111 loss: 0.5890\n",
      "epoch: 112 loss: 0.5814\n",
      "epoch: 113 loss: 0.5753\n",
      "epoch: 114 loss: 0.5764\n",
      "epoch: 115 loss: 0.5649\n",
      "epoch: 116 loss: 0.5667\n",
      "epoch: 117 loss: 0.5629\n",
      "epoch: 118 loss: 0.5492\n",
      "epoch: 119 loss: 0.5609\n",
      "val_accuracy: 0.7689\n",
      "epoch: 120 loss: 0.5560\n",
      "epoch: 121 loss: 0.5469\n",
      "epoch: 122 loss: 0.5537\n",
      "epoch: 123 loss: 0.5389\n",
      "epoch: 124 loss: 0.5421\n",
      "epoch: 125 loss: 0.5496\n",
      "epoch: 126 loss: 0.5350\n",
      "epoch: 127 loss: 0.5316\n",
      "epoch: 128 loss: 0.5390\n",
      "epoch: 129 loss: 0.5188\n",
      "val_accuracy: 0.7659\n",
      "epoch: 130 loss: 0.5270\n",
      "epoch: 131 loss: 0.5145\n",
      "epoch: 132 loss: 0.5375\n",
      "epoch: 133 loss: 0.5237\n",
      "epoch: 134 loss: 0.5101\n",
      "epoch: 135 loss: 0.5214\n",
      "epoch: 136 loss: 0.5091\n",
      "epoch: 137 loss: 0.5183\n",
      "epoch: 138 loss: 0.5188\n",
      "epoch: 139 loss: 0.5179\n",
      "val_accuracy: 0.7659\n",
      "epoch: 140 loss: 0.5141\n",
      "epoch: 141 loss: 0.5027\n",
      "epoch: 142 loss: 0.4969\n",
      "epoch: 143 loss: 0.5081\n",
      "epoch: 144 loss: 0.4963\n",
      "epoch: 145 loss: 0.4901\n",
      "epoch: 146 loss: 0.4796\n",
      "epoch: 147 loss: 0.4877\n",
      "epoch: 148 loss: 0.4893\n",
      "epoch: 149 loss: 0.4930\n",
      "val_accuracy: 0.7644\n",
      "epoch: 150 loss: 0.4895\n",
      "epoch: 151 loss: 0.4925\n",
      "epoch: 152 loss: 0.4794\n",
      "epoch: 153 loss: 0.4849\n",
      "epoch: 154 loss: 0.4801\n",
      "epoch: 155 loss: 0.4747\n",
      "epoch: 156 loss: 0.4844\n",
      "epoch: 157 loss: 0.4792\n",
      "epoch: 158 loss: 0.4725\n",
      "epoch: 159 loss: 0.4758\n",
      "val_accuracy: 0.7674\n",
      "epoch: 160 loss: 0.4748\n",
      "epoch: 161 loss: 0.4718\n",
      "epoch: 162 loss: 0.4653\n",
      "epoch: 163 loss: 0.4697\n",
      "epoch: 164 loss: 0.4540\n",
      "epoch: 165 loss: 0.4580\n",
      "epoch: 166 loss: 0.4547\n",
      "epoch: 167 loss: 0.4562\n",
      "epoch: 168 loss: 0.4652\n",
      "epoch: 169 loss: 0.4460\n",
      "val_accuracy: 0.7644\n",
      "epoch: 170 loss: 0.4534\n",
      "epoch: 171 loss: 0.4478\n",
      "epoch: 172 loss: 0.4517\n",
      "epoch: 173 loss: 0.4502\n",
      "epoch: 174 loss: 0.4490\n",
      "epoch: 175 loss: 0.4466\n",
      "epoch: 176 loss: 0.4390\n",
      "epoch: 177 loss: 0.4441\n",
      "epoch: 178 loss: 0.4463\n",
      "epoch: 179 loss: 0.4461\n",
      "val_accuracy: 0.7659\n",
      "epoch: 180 loss: 0.4385\n",
      "epoch: 181 loss: 0.4422\n",
      "epoch: 182 loss: 0.4316\n",
      "epoch: 183 loss: 0.4517\n",
      "epoch: 184 loss: 0.4332\n",
      "epoch: 185 loss: 0.4384\n",
      "epoch: 186 loss: 0.4274\n",
      "epoch: 187 loss: 0.4285\n",
      "epoch: 188 loss: 0.4234\n",
      "epoch: 189 loss: 0.4263\n",
      "val_accuracy: 0.7628\n",
      "epoch: 190 loss: 0.4316\n",
      "epoch: 191 loss: 0.4196\n",
      "epoch: 192 loss: 0.4178\n",
      "epoch: 193 loss: 0.4305\n",
      "epoch: 194 loss: 0.4201\n",
      "epoch: 195 loss: 0.4263\n",
      "epoch: 196 loss: 0.4237\n",
      "epoch: 197 loss: 0.4236\n",
      "epoch: 198 loss: 0.4148\n",
      "epoch: 199 loss: 0.4193\n",
      "val_accuracy: 0.7613\n",
      "epoch: 0 loss: 0.4145\n",
      "epoch: 1 loss: 0.4197\n",
      "epoch: 2 loss: 0.4138\n",
      "epoch: 3 loss: 0.4044\n",
      "epoch: 4 loss: 0.4067\n",
      "epoch: 5 loss: 0.4039\n",
      "epoch: 6 loss: 0.4041\n",
      "epoch: 7 loss: 0.4100\n",
      "epoch: 8 loss: 0.3904\n",
      "epoch: 9 loss: 0.3982\n",
      "val_accuracy: 0.7583\n",
      "epoch: 10 loss: 0.3872\n",
      "epoch: 11 loss: 0.3924\n",
      "epoch: 12 loss: 0.3939\n",
      "epoch: 13 loss: 0.3965\n",
      "epoch: 14 loss: 0.3979\n",
      "epoch: 15 loss: 0.3963\n",
      "epoch: 16 loss: 0.3892\n",
      "epoch: 17 loss: 0.3818\n",
      "epoch: 18 loss: 0.3773\n",
      "epoch: 19 loss: 0.3802\n",
      "val_accuracy: 0.7568\n",
      "epoch: 20 loss: 0.3785\n",
      "epoch: 21 loss: 0.3795\n",
      "epoch: 22 loss: 0.3846\n",
      "epoch: 23 loss: 0.3762\n",
      "epoch: 24 loss: 0.3775\n",
      "epoch: 25 loss: 0.3782\n",
      "epoch: 26 loss: 0.3748\n",
      "epoch: 27 loss: 0.3676\n",
      "epoch: 28 loss: 0.3637\n",
      "epoch: 29 loss: 0.3744\n",
      "val_accuracy: 0.7583\n",
      "epoch: 30 loss: 0.3935\n",
      "epoch: 31 loss: 0.3681\n",
      "epoch: 32 loss: 0.3748\n",
      "epoch: 33 loss: 0.3596\n",
      "epoch: 34 loss: 0.3620\n",
      "epoch: 35 loss: 0.3545\n",
      "epoch: 36 loss: 0.3595\n",
      "epoch: 37 loss: 0.3659\n",
      "epoch: 38 loss: 0.3653\n",
      "epoch: 39 loss: 0.3635\n",
      "val_accuracy: 0.7568\n",
      "epoch: 40 loss: 0.3562\n",
      "epoch: 41 loss: 0.3699\n",
      "epoch: 42 loss: 0.3565\n",
      "epoch: 43 loss: 0.3522\n",
      "epoch: 44 loss: 0.3530\n",
      "epoch: 45 loss: 0.3534\n",
      "epoch: 46 loss: 0.3579\n",
      "epoch: 47 loss: 0.3530\n",
      "epoch: 48 loss: 0.3446\n",
      "epoch: 49 loss: 0.3494\n",
      "val_accuracy: 0.7553\n",
      "epoch: 50 loss: 0.3466\n",
      "epoch: 51 loss: 0.3516\n",
      "epoch: 52 loss: 0.3419\n",
      "epoch: 53 loss: 0.3494\n",
      "epoch: 54 loss: 0.3373\n",
      "epoch: 55 loss: 0.3481\n",
      "epoch: 56 loss: 0.3314\n",
      "epoch: 57 loss: 0.3387\n",
      "epoch: 58 loss: 0.3399\n",
      "epoch: 59 loss: 0.3429\n",
      "val_accuracy: 0.7492\n",
      "epoch: 60 loss: 0.3442\n",
      "epoch: 61 loss: 0.3391\n",
      "epoch: 62 loss: 0.3424\n",
      "epoch: 63 loss: 0.3369\n",
      "epoch: 64 loss: 0.3252\n",
      "epoch: 65 loss: 0.3253\n",
      "epoch: 66 loss: 0.3322\n",
      "epoch: 67 loss: 0.3229\n",
      "epoch: 68 loss: 0.3275\n",
      "epoch: 69 loss: 0.3277\n",
      "val_accuracy: 0.7508\n",
      "epoch: 70 loss: 0.3303\n",
      "epoch: 71 loss: 0.3424\n",
      "epoch: 72 loss: 0.3247\n",
      "epoch: 73 loss: 0.3193\n",
      "epoch: 74 loss: 0.3382\n",
      "epoch: 75 loss: 0.3190\n",
      "epoch: 76 loss: 0.3278\n",
      "epoch: 77 loss: 0.3336\n",
      "epoch: 78 loss: 0.3301\n",
      "epoch: 79 loss: 0.3410\n",
      "val_accuracy: 0.7477\n",
      "epoch: 80 loss: 0.3123\n",
      "epoch: 81 loss: 0.3366\n",
      "epoch: 82 loss: 0.3206\n",
      "epoch: 83 loss: 0.3160\n",
      "epoch: 84 loss: 0.3188\n",
      "epoch: 85 loss: 0.3207\n",
      "epoch: 86 loss: 0.3208\n",
      "epoch: 87 loss: 0.3140\n",
      "epoch: 88 loss: 0.3122\n",
      "epoch: 89 loss: 0.3247\n",
      "val_accuracy: 0.7477\n",
      "epoch: 90 loss: 0.3156\n",
      "epoch: 91 loss: 0.3027\n",
      "epoch: 92 loss: 0.3071\n",
      "epoch: 93 loss: 0.3112\n",
      "epoch: 94 loss: 0.3061\n",
      "epoch: 95 loss: 0.3111\n",
      "epoch: 96 loss: 0.3057\n",
      "epoch: 97 loss: 0.3050\n",
      "epoch: 98 loss: 0.3172\n",
      "epoch: 99 loss: 0.3074\n",
      "val_accuracy: 0.7508\n",
      "epoch: 100 loss: 0.3112\n",
      "epoch: 101 loss: 0.3155\n",
      "epoch: 102 loss: 0.3138\n",
      "epoch: 103 loss: 0.3011\n",
      "epoch: 104 loss: 0.3091\n",
      "epoch: 105 loss: 0.3131\n",
      "epoch: 106 loss: 0.3097\n",
      "epoch: 107 loss: 0.3028\n",
      "epoch: 108 loss: 0.3136\n",
      "epoch: 109 loss: 0.3140\n",
      "val_accuracy: 0.7492\n",
      "epoch: 110 loss: 0.3106\n",
      "epoch: 111 loss: 0.2994\n",
      "epoch: 112 loss: 0.3026\n",
      "epoch: 113 loss: 0.3039\n",
      "epoch: 114 loss: 0.2860\n",
      "epoch: 115 loss: 0.2922\n",
      "epoch: 116 loss: 0.2972\n",
      "epoch: 117 loss: 0.2913\n",
      "epoch: 118 loss: 0.2948\n",
      "epoch: 119 loss: 0.3041\n",
      "val_accuracy: 0.7508\n",
      "epoch: 120 loss: 0.2967\n",
      "epoch: 121 loss: 0.2986\n",
      "epoch: 122 loss: 0.2986\n",
      "epoch: 123 loss: 0.3026\n",
      "epoch: 124 loss: 0.2954\n",
      "epoch: 125 loss: 0.2902\n",
      "epoch: 126 loss: 0.3044\n",
      "epoch: 127 loss: 0.2989\n",
      "epoch: 128 loss: 0.2820\n",
      "epoch: 129 loss: 0.2926\n",
      "val_accuracy: 0.7462\n",
      "epoch: 130 loss: 0.2920\n",
      "epoch: 131 loss: 0.2888\n",
      "epoch: 132 loss: 0.2882\n",
      "epoch: 133 loss: 0.2899\n",
      "epoch: 134 loss: 0.2974\n",
      "epoch: 135 loss: 0.2974\n",
      "epoch: 136 loss: 0.2980\n",
      "epoch: 137 loss: 0.2891\n",
      "epoch: 138 loss: 0.2901\n",
      "epoch: 139 loss: 0.2885\n",
      "val_accuracy: 0.7462\n",
      "epoch: 140 loss: 0.2986\n",
      "epoch: 141 loss: 0.2912\n",
      "epoch: 142 loss: 0.2998\n",
      "epoch: 143 loss: 0.2898\n",
      "epoch: 144 loss: 0.2868\n",
      "epoch: 145 loss: 0.2827\n",
      "epoch: 146 loss: 0.2848\n",
      "epoch: 147 loss: 0.2931\n",
      "epoch: 148 loss: 0.2982\n",
      "epoch: 149 loss: 0.2907\n",
      "val_accuracy: 0.7462\n",
      "epoch: 150 loss: 0.2790\n",
      "epoch: 151 loss: 0.2766\n",
      "epoch: 152 loss: 0.2832\n",
      "epoch: 153 loss: 0.2748\n",
      "epoch: 154 loss: 0.2797\n",
      "epoch: 155 loss: 0.2773\n",
      "epoch: 156 loss: 0.2752\n",
      "epoch: 157 loss: 0.2883\n",
      "epoch: 158 loss: 0.2891\n",
      "epoch: 159 loss: 0.2880\n",
      "val_accuracy: 0.7477\n",
      "epoch: 160 loss: 0.2798\n",
      "epoch: 161 loss: 0.2719\n",
      "epoch: 162 loss: 0.2821\n",
      "epoch: 163 loss: 0.2852\n",
      "epoch: 164 loss: 0.2724\n",
      "epoch: 165 loss: 0.2657\n",
      "epoch: 166 loss: 0.2753\n",
      "epoch: 167 loss: 0.2676\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 168 loss: 0.2859\n",
      "epoch: 169 loss: 0.2767\n",
      "val_accuracy: 0.7462\n",
      "epoch: 170 loss: 0.2842\n",
      "epoch: 171 loss: 0.2753\n",
      "epoch: 172 loss: 0.2794\n",
      "epoch: 173 loss: 0.2745\n",
      "epoch: 174 loss: 0.2707\n",
      "epoch: 175 loss: 0.2801\n",
      "epoch: 176 loss: 0.2768\n",
      "epoch: 177 loss: 0.2789\n",
      "epoch: 178 loss: 0.2699\n",
      "epoch: 179 loss: 0.2701\n",
      "val_accuracy: 0.7492\n",
      "epoch: 180 loss: 0.2636\n",
      "epoch: 181 loss: 0.2781\n",
      "epoch: 182 loss: 0.2760\n",
      "epoch: 183 loss: 0.2781\n",
      "epoch: 184 loss: 0.2772\n",
      "epoch: 185 loss: 0.2639\n",
      "epoch: 186 loss: 0.2585\n",
      "epoch: 187 loss: 0.2712\n",
      "epoch: 188 loss: 0.2720\n",
      "epoch: 189 loss: 0.2570\n",
      "val_accuracy: 0.7462\n",
      "epoch: 190 loss: 0.2688\n",
      "epoch: 191 loss: 0.2700\n",
      "epoch: 192 loss: 0.2624\n",
      "epoch: 193 loss: 0.2593\n",
      "epoch: 194 loss: 0.2820\n",
      "epoch: 195 loss: 0.2742\n",
      "epoch: 196 loss: 0.2750\n",
      "epoch: 197 loss: 0.2687\n",
      "epoch: 198 loss: 0.2674\n",
      "epoch: 199 loss: 0.2732\n",
      "val_accuracy: 0.7477\n",
      "epoch: 0 loss: 0.2630\n",
      "epoch: 1 loss: 0.2577\n",
      "epoch: 2 loss: 0.2644\n",
      "epoch: 3 loss: 0.2625\n",
      "epoch: 4 loss: 0.2611\n",
      "epoch: 5 loss: 0.2611\n",
      "epoch: 6 loss: 0.2575\n",
      "epoch: 7 loss: 0.2589\n",
      "epoch: 8 loss: 0.2682\n",
      "epoch: 9 loss: 0.2612\n",
      "val_accuracy: 0.7492\n",
      "epoch: 10 loss: 0.2680\n",
      "epoch: 11 loss: 0.2539\n",
      "epoch: 12 loss: 0.2634\n",
      "epoch: 13 loss: 0.2529\n",
      "epoch: 14 loss: 0.2589\n",
      "epoch: 15 loss: 0.2531\n",
      "epoch: 16 loss: 0.2681\n",
      "epoch: 17 loss: 0.2497\n",
      "epoch: 18 loss: 0.2559\n",
      "epoch: 19 loss: 0.2556\n",
      "val_accuracy: 0.7462\n",
      "epoch: 20 loss: 0.2525\n",
      "epoch: 21 loss: 0.2621\n",
      "epoch: 22 loss: 0.2508\n",
      "epoch: 23 loss: 0.2546\n",
      "epoch: 24 loss: 0.2546\n",
      "epoch: 25 loss: 0.2543\n",
      "epoch: 26 loss: 0.2550\n",
      "epoch: 27 loss: 0.2637\n",
      "epoch: 28 loss: 0.2688\n",
      "epoch: 29 loss: 0.2539\n",
      "val_accuracy: 0.7447\n",
      "epoch: 30 loss: 0.2703\n",
      "epoch: 31 loss: 0.2575\n",
      "epoch: 32 loss: 0.2538\n",
      "epoch: 33 loss: 0.2524\n",
      "epoch: 34 loss: 0.2538\n",
      "epoch: 35 loss: 0.2475\n",
      "epoch: 36 loss: 0.2531\n",
      "epoch: 37 loss: 0.2628\n",
      "epoch: 38 loss: 0.2620\n",
      "epoch: 39 loss: 0.2502\n",
      "val_accuracy: 0.7462\n",
      "epoch: 40 loss: 0.2626\n",
      "epoch: 41 loss: 0.2519\n",
      "epoch: 42 loss: 0.2433\n",
      "epoch: 43 loss: 0.2549\n",
      "epoch: 44 loss: 0.2555\n",
      "epoch: 45 loss: 0.2457\n",
      "epoch: 46 loss: 0.2409\n",
      "epoch: 47 loss: 0.2540\n",
      "epoch: 48 loss: 0.2499\n",
      "epoch: 49 loss: 0.2538\n",
      "val_accuracy: 0.7432\n",
      "epoch: 50 loss: 0.2499\n",
      "epoch: 51 loss: 0.2600\n",
      "epoch: 52 loss: 0.2545\n",
      "epoch: 53 loss: 0.2562\n",
      "epoch: 54 loss: 0.2474\n",
      "epoch: 55 loss: 0.2427\n",
      "epoch: 56 loss: 0.2469\n",
      "epoch: 57 loss: 0.2415\n",
      "epoch: 58 loss: 0.2552\n",
      "epoch: 59 loss: 0.2594\n",
      "val_accuracy: 0.7417\n",
      "epoch: 60 loss: 0.2455\n",
      "epoch: 61 loss: 0.2419\n",
      "epoch: 62 loss: 0.2459\n",
      "epoch: 63 loss: 0.2424\n",
      "epoch: 64 loss: 0.2451\n",
      "epoch: 65 loss: 0.2401\n",
      "epoch: 66 loss: 0.2415\n",
      "epoch: 67 loss: 0.2480\n",
      "epoch: 68 loss: 0.2344\n",
      "epoch: 69 loss: 0.2441\n",
      "val_accuracy: 0.7477\n",
      "epoch: 70 loss: 0.2421\n",
      "epoch: 71 loss: 0.2376\n",
      "epoch: 72 loss: 0.2457\n",
      "epoch: 73 loss: 0.2457\n",
      "epoch: 74 loss: 0.2444\n",
      "epoch: 75 loss: 0.2449\n",
      "epoch: 76 loss: 0.2390\n",
      "epoch: 77 loss: 0.2462\n",
      "epoch: 78 loss: 0.2477\n",
      "epoch: 79 loss: 0.2364\n",
      "val_accuracy: 0.7477\n",
      "epoch: 80 loss: 0.2384\n",
      "epoch: 81 loss: 0.2323\n",
      "epoch: 82 loss: 0.2402\n",
      "epoch: 83 loss: 0.2423\n",
      "epoch: 84 loss: 0.2386\n",
      "epoch: 85 loss: 0.2426\n",
      "epoch: 86 loss: 0.2355\n",
      "epoch: 87 loss: 0.2428\n",
      "epoch: 88 loss: 0.2369\n",
      "epoch: 89 loss: 0.2372\n",
      "val_accuracy: 0.7447\n",
      "epoch: 90 loss: 0.2518\n",
      "epoch: 91 loss: 0.2475\n",
      "epoch: 92 loss: 0.2301\n",
      "epoch: 93 loss: 0.2385\n",
      "epoch: 94 loss: 0.2345\n",
      "epoch: 95 loss: 0.2430\n",
      "epoch: 96 loss: 0.2384\n",
      "epoch: 97 loss: 0.2265\n",
      "epoch: 98 loss: 0.2346\n",
      "epoch: 99 loss: 0.2336\n",
      "val_accuracy: 0.7447\n",
      "epoch: 100 loss: 0.2331\n",
      "epoch: 101 loss: 0.2278\n",
      "epoch: 102 loss: 0.2339\n",
      "epoch: 103 loss: 0.2362\n",
      "epoch: 104 loss: 0.2373\n",
      "epoch: 105 loss: 0.2285\n",
      "epoch: 106 loss: 0.2338\n",
      "epoch: 107 loss: 0.2393\n",
      "epoch: 108 loss: 0.2281\n",
      "epoch: 109 loss: 0.2395\n",
      "val_accuracy: 0.7477\n",
      "epoch: 110 loss: 0.2287\n",
      "epoch: 111 loss: 0.2270\n",
      "epoch: 112 loss: 0.2416\n",
      "epoch: 113 loss: 0.2294\n",
      "epoch: 114 loss: 0.2283\n",
      "epoch: 115 loss: 0.2307\n",
      "epoch: 116 loss: 0.2311\n",
      "epoch: 117 loss: 0.2329\n",
      "epoch: 118 loss: 0.2372\n",
      "epoch: 119 loss: 0.2419\n",
      "val_accuracy: 0.7462\n",
      "epoch: 120 loss: 0.2292\n",
      "epoch: 121 loss: 0.2372\n",
      "epoch: 122 loss: 0.2333\n",
      "epoch: 123 loss: 0.2339\n",
      "epoch: 124 loss: 0.2272\n",
      "epoch: 125 loss: 0.2424\n",
      "epoch: 126 loss: 0.2222\n",
      "epoch: 127 loss: 0.2352\n",
      "epoch: 128 loss: 0.2353\n",
      "epoch: 129 loss: 0.2355\n",
      "val_accuracy: 0.7447\n",
      "epoch: 130 loss: 0.2252\n",
      "epoch: 131 loss: 0.2265\n",
      "epoch: 132 loss: 0.2280\n",
      "epoch: 133 loss: 0.2316\n",
      "epoch: 134 loss: 0.2297\n",
      "epoch: 135 loss: 0.2296\n",
      "epoch: 136 loss: 0.2343\n",
      "epoch: 137 loss: 0.2329\n",
      "epoch: 138 loss: 0.2339\n",
      "epoch: 139 loss: 0.2265\n",
      "val_accuracy: 0.7462\n",
      "epoch: 140 loss: 0.2285\n",
      "epoch: 141 loss: 0.2305\n",
      "epoch: 142 loss: 0.2243\n",
      "epoch: 143 loss: 0.2323\n",
      "epoch: 144 loss: 0.2309\n",
      "epoch: 145 loss: 0.2257\n",
      "epoch: 146 loss: 0.2374\n",
      "epoch: 147 loss: 0.2338\n",
      "epoch: 148 loss: 0.2323\n",
      "epoch: 149 loss: 0.2229\n",
      "val_accuracy: 0.7477\n",
      "epoch: 150 loss: 0.2260\n",
      "epoch: 151 loss: 0.2312\n",
      "epoch: 152 loss: 0.2244\n",
      "epoch: 153 loss: 0.2280\n",
      "epoch: 154 loss: 0.2118\n",
      "epoch: 155 loss: 0.2290\n",
      "epoch: 156 loss: 0.2187\n",
      "epoch: 157 loss: 0.2311\n",
      "epoch: 158 loss: 0.2248\n",
      "epoch: 159 loss: 0.2276\n",
      "val_accuracy: 0.7462\n",
      "epoch: 160 loss: 0.2226\n",
      "epoch: 161 loss: 0.2173\n",
      "epoch: 162 loss: 0.2205\n",
      "epoch: 163 loss: 0.2296\n",
      "epoch: 164 loss: 0.2165\n",
      "epoch: 165 loss: 0.2292\n",
      "epoch: 166 loss: 0.2178\n",
      "epoch: 167 loss: 0.2231\n",
      "epoch: 168 loss: 0.2186\n",
      "epoch: 169 loss: 0.2344\n",
      "val_accuracy: 0.7492\n",
      "epoch: 170 loss: 0.2115\n",
      "epoch: 171 loss: 0.2247\n",
      "epoch: 172 loss: 0.2147\n",
      "epoch: 173 loss: 0.2185\n",
      "epoch: 174 loss: 0.2246\n",
      "epoch: 175 loss: 0.2220\n",
      "epoch: 176 loss: 0.2213\n",
      "epoch: 177 loss: 0.2254\n",
      "epoch: 178 loss: 0.2234\n",
      "epoch: 179 loss: 0.2160\n",
      "val_accuracy: 0.7492\n",
      "epoch: 180 loss: 0.2133\n",
      "epoch: 181 loss: 0.2248\n",
      "epoch: 182 loss: 0.2215\n",
      "epoch: 183 loss: 0.2131\n",
      "epoch: 184 loss: 0.2250\n",
      "epoch: 185 loss: 0.2113\n",
      "epoch: 186 loss: 0.2220\n",
      "epoch: 187 loss: 0.2207\n",
      "epoch: 188 loss: 0.2236\n",
      "epoch: 189 loss: 0.2251\n",
      "val_accuracy: 0.7462\n",
      "epoch: 190 loss: 0.2214\n",
      "epoch: 191 loss: 0.2006\n",
      "epoch: 192 loss: 0.2121\n",
      "epoch: 193 loss: 0.2185\n",
      "epoch: 194 loss: 0.2207\n",
      "epoch: 195 loss: 0.2145\n",
      "epoch: 196 loss: 0.2277\n",
      "epoch: 197 loss: 0.2129\n",
      "epoch: 198 loss: 0.2108\n",
      "epoch: 199 loss: 0.2144\n",
      "val_accuracy: 0.7417\n",
      "mean_accuracy: 0.7652\n"
     ]
    }
   ],
   "source": [
    "# Train a GCN on Cora 3 times and report the mean test accuracy, where each\n",
    "# run's test accuracy is taken at its best-validation-accuracy checkpoint.\n",
    "accuracy = []\n",
    "for run in range(3):\n",
    "    # NOTE(review): the model is re-created per run so the 3 repeats are\n",
    "    # independent; previously a single model kept training across all repeats\n",
    "    # (the logged loss kept decreasing across run boundaries).\n",
    "    model = GCNNet(features.shape[1], len(label_to_index)).to(device)\n",
    "    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3, weight_decay=5e-4)\n",
    "    max_val_acc = 0   # best validation accuracy seen in this run\n",
    "    test_val_acc = 0  # test accuracy at the best-validation checkpoint\n",
    "    model.train()\n",
    "    for epoch in range(200):\n",
    "        optimizer.zero_grad()\n",
    "        out = model(data)\n",
    "        loss = F.nll_loss(out[train_mask], data.y[train_mask])\n",
    "        print('epoch: %d loss: %.4f' % (epoch, loss))\n",
    "        loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        # Evaluate on the validation split every 10 epochs.\n",
    "        if (epoch + 1) % 10 == 0:\n",
    "            model.eval()\n",
    "            with torch.no_grad():  # no gradients needed for evaluation\n",
    "                out = model(data)\n",
    "                _, pred = out.max(dim=1)\n",
    "                correct = int(pred[val_mask].eq(data.y[val_mask]).sum().item())\n",
    "                # assumes val_mask/test_mask are index collections, so len()\n",
    "                # equals the split size -- TODO confirm (boolean masks would\n",
    "                # need int(mask.sum()) instead)\n",
    "                acc = correct / len(val_mask)\n",
    "                if acc > max_val_acc:\n",
    "                    # bug fix: max_val_acc was never updated, so the test\n",
    "                    # accuracy was taken at the last eval, not the best one\n",
    "                    max_val_acc = acc\n",
    "                    correct = int(pred[test_mask].eq(data.y[test_mask]).sum().item())\n",
    "                    test_val_acc = correct / len(test_mask)\n",
    "            print('val_accuracy: {:.4f}'.format(acc))\n",
    "            model.train()\n",
    "    accuracy.append(test_val_acc)\n",
    "\n",
    "print(\"mean_accuracy: %.4f\" % (sum(accuracy) / len(accuracy)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "song",
   "language": "python",
   "name": "song"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.13"
  },
  "latex_envs": {
   "LaTeX_envs_menu_present": true,
   "autoclose": false,
   "autocomplete": true,
   "bibliofile": "biblio.bib",
   "cite_by": "apalike",
   "current_citInitial": 1,
   "eqLabelWithNumbers": true,
   "eqNumInitial": 1,
   "hotkeys": {
    "equation": "Ctrl-E",
    "itemize": "Ctrl-I"
   },
   "labels_anchors": false,
   "latex_user_defs": false,
   "report_style_numbering": false,
   "user_envs_cfg": false
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
