{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "regular-coffee",
   "metadata": {},
   "source": [
    "# 1、载入数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "excellent-retirement",
   "metadata": {},
   "outputs": [],
   "source": [
    "from torch_geometric.datasets import Planetoid\n",
    "import torch\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import pickle as pkl\n",
    "import networkx as nx\n",
    "import scipy.sparse as sp\n",
    "# BUGFIX: scipy.sparse.linalg.eigen.arpack is a private module path that was\n",
    "# removed in SciPy >= 1.8; eigsh lives publicly in scipy.sparse.linalg\n",
    "from scipy.sparse.linalg import eigsh\n",
    "import sys\n",
    "import os\n",
    "# download (if needed) and load the Cora citation dataset; data is its single graph\n",
    "dataset = Planetoid(root='./cora/',name='Cora')\n",
    "data = dataset[0]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "elementary-password",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Size([2708, 1433])"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# node feature matrix: 2708 papers x 1433 bag-of-words features\n",
    "data.x.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "medium-familiar",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "3    818\n",
       "4    426\n",
       "2    418\n",
       "0    351\n",
       "5    298\n",
       "1    217\n",
       "6    180\n",
       "dtype: int64"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# class distribution: number of nodes per label id (7 classes)\n",
    "pd.DataFrame(data.y).value_counts()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "blank-wagon",
   "metadata": {},
   "outputs": [],
   "source": [
    "path = 'cora/Cora/raw'\n",
    "dataset_str = 'cora'\n",
    "\n",
    "# Load the raw Planetoid pickle files (x/tx/allx: feature matrices,\n",
    "# y/ty/ally: labels, graph: adjacency dict {node: [neighbor ids]}).\n",
    "# NOTE(security): pickle.load can execute arbitrary code -- only use on trusted files.\n",
    "names = ['x', 'y', 'tx', 'ty', 'allx', 'ally', 'graph']\n",
    "objects = []\n",
    "# iterate the names directly; the old Python-2 fallback branch is dead code\n",
    "# on this Python-3 kernel (sys.version_info > (3, 0) is always true here)\n",
    "for name in names:\n",
    "    with open(os.path.join(path, \"ind.{}.{}\".format(dataset_str, name)), 'rb') as f:\n",
    "        # latin1 is required to read these Python-2 era pickles under Python 3\n",
    "        objects.append(pkl.load(f, encoding='latin1'))\n",
    "\n",
    "x, y, tx, ty, allx, ally, graph = tuple(objects)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "interesting-occupation",
   "metadata": {},
   "source": [
    "# 二、从边的结构上看cora数据集的边上的节点和我们的节点的label相同性"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "functional-owner",
   "metadata": {},
   "outputs": [],
   "source": [
    "# 以字典的形式保存整个图        \n",
    "neighbor_label_compare = []\n",
    "\n",
    "for i, (src, dsts) in enumerate(graph.items()):\n",
    "    same = 0\n",
    "    different = 0\n",
    "    \n",
    "    for dst in dsts:\n",
    "        if data.y[dst] == data.y[src]:\n",
    "            same +=1\n",
    "        else:\n",
    "            different +=1\n",
    "    neighbor_label_compare.append([same,different])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "id": "norman-monitor",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>3</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>3</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>2</td>\n",
       "      <td>3</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>4</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2703</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2704</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2705</th>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2706</th>\n",
       "      <td>4</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2707</th>\n",
       "      <td>4</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>2708 rows × 2 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "      0  1\n",
       "0     3  0\n",
       "1     3  0\n",
       "2     2  3\n",
       "3     1  0\n",
       "4     4  2\n",
       "...  .. ..\n",
       "2703  1  0\n",
       "2704  1  0\n",
       "2705  1  0\n",
       "2706  4  0\n",
       "2707  4  0\n",
       "\n",
       "[2708 rows x 2 columns]"
      ]
     },
     "execution_count": 45,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# per-node [same, different] neighbour-label counts as a DataFrame\n",
    "pd_neighbor_label_compare = pd.DataFrame(neighbor_label_compare)\n",
    "pd_neighbor_label_compare"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "divine-responsibility",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "不相似的节点的总个数统计： 2022\n"
     ]
    }
   ],
   "source": [
    "print(\"不相似的节点的总个数统计：\",pd_neighbor_label_compare[1].sum())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "id": "silver-alarm",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "相似的节点的总个数统计： 8836\n"
     ]
    }
   ],
   "source": [
    "print(\"相似的节点的总个数统计：\",pd_neighbor_label_compare[0].sum())"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "dental-kentucky",
   "metadata": {},
   "source": [
    "# 三、从节点信息上看cora数据集中同一label的节点之间的x的相似性"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "regional-utilization",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "      <th>2</th>\n",
       "      <th>3</th>\n",
       "      <th>4</th>\n",
       "      <th>5</th>\n",
       "      <th>6</th>\n",
       "      <th>7</th>\n",
       "      <th>8</th>\n",
       "      <th>9</th>\n",
       "      <th>...</th>\n",
       "      <th>2698</th>\n",
       "      <th>2699</th>\n",
       "      <th>2700</th>\n",
       "      <th>2701</th>\n",
       "      <th>2702</th>\n",
       "      <th>2703</th>\n",
       "      <th>2704</th>\n",
       "      <th>2705</th>\n",
       "      <th>2706</th>\n",
       "      <th>2707</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.069505</td>\n",
       "      <td>0.076472</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.078567</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.145479</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.072739</td>\n",
       "      <td>0.152944</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.208514</td>\n",
       "      <td>0.111111</td>\n",
       "      <td>0.078567</td>\n",
       "      <td>0.178174</td>\n",
       "      <td>0.184900</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.069505</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.143509</td>\n",
       "      <td>0.136505</td>\n",
       "      <td>0.049147</td>\n",
       "      <td>0.115663</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.046625</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.045502</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.045502</td>\n",
       "      <td>0.191346</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.043478</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.055728</td>\n",
       "      <td>0.057831</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.076472</td>\n",
       "      <td>0.143509</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.150188</td>\n",
       "      <td>0.108148</td>\n",
       "      <td>0.127257</td>\n",
       "      <td>0.162221</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.153897</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.100125</td>\n",
       "      <td>0.052632</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.105263</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.047836</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.108148</td>\n",
       "      <td>0.061314</td>\n",
       "      <td>0.063628</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.136505</td>\n",
       "      <td>0.150188</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.121046</td>\n",
       "      <td>0.102869</td>\n",
       "      <td>0.116642</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.047619</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.051434</td>\n",
       "      <td>0.091003</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.078567</td>\n",
       "      <td>0.049147</td>\n",
       "      <td>0.108148</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.065372</td>\n",
       "      <td>0.055556</td>\n",
       "      <td>0.062994</td>\n",
       "      <td>0.105409</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.051434</td>\n",
       "      <td>0.054074</td>\n",
       "      <td>0.102869</td>\n",
       "      <td>0.054074</td>\n",
       "      <td>0.111111</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.111111</td>\n",
       "      <td>0.062994</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2703</th>\n",
       "      <td>0.208514</td>\n",
       "      <td>0.043478</td>\n",
       "      <td>0.047836</td>\n",
       "      <td>0.091003</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.049147</td>\n",
       "      <td>0.055728</td>\n",
       "      <td>0.046625</td>\n",
       "      <td>0.120386</td>\n",
       "      <td>...</td>\n",
       "      <td>0.136505</td>\n",
       "      <td>0.047836</td>\n",
       "      <td>0.045502</td>\n",
       "      <td>0.095673</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.049147</td>\n",
       "      <td>0.111456</td>\n",
       "      <td>0.115663</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2704</th>\n",
       "      <td>0.111111</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.145479</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.145479</td>\n",
       "      <td>0.076472</td>\n",
       "      <td>0.078567</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.089087</td>\n",
       "      <td>0.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2705</th>\n",
       "      <td>0.078567</td>\n",
       "      <td>0.098295</td>\n",
       "      <td>0.108148</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.111111</td>\n",
       "      <td>0.130744</td>\n",
       "      <td>0.055556</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.052705</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.051434</td>\n",
       "      <td>0.054074</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.049147</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.065372</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2706</th>\n",
       "      <td>0.178174</td>\n",
       "      <td>0.055728</td>\n",
       "      <td>0.061314</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.062994</td>\n",
       "      <td>0.074125</td>\n",
       "      <td>0.062994</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.119523</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.174964</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.058321</td>\n",
       "      <td>0.122628</td>\n",
       "      <td>0.062994</td>\n",
       "      <td>0.111456</td>\n",
       "      <td>0.089087</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>0.148250</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2707</th>\n",
       "      <td>0.184900</td>\n",
       "      <td>0.057831</td>\n",
       "      <td>0.063628</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.076923</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.074125</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>...</td>\n",
       "      <td>0.121046</td>\n",
       "      <td>0.063628</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.127257</td>\n",
       "      <td>0.196116</td>\n",
       "      <td>0.115663</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.065372</td>\n",
       "      <td>0.148250</td>\n",
       "      <td>1.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>2708 rows × 2708 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "          0         1         2         3         4         5         6     \\\n",
       "0     1.000000  0.069505  0.076472  0.000000  0.078567  0.000000  0.000000   \n",
       "1     0.069505  1.000000  0.143509  0.136505  0.049147  0.115663  0.098295   \n",
       "2     0.076472  0.143509  1.000000  0.150188  0.108148  0.127257  0.162221   \n",
       "3     0.000000  0.136505  0.150188  1.000000  0.000000  0.121046  0.102869   \n",
       "4     0.078567  0.049147  0.108148  0.000000  1.000000  0.065372  0.055556   \n",
       "...        ...       ...       ...       ...       ...       ...       ...   \n",
       "2703  0.208514  0.043478  0.047836  0.091003  0.098295  0.000000  0.049147   \n",
       "2704  0.111111  0.000000  0.000000  0.000000  0.000000  0.000000  0.000000   \n",
       "2705  0.078567  0.098295  0.108148  0.000000  0.111111  0.130744  0.055556   \n",
       "2706  0.178174  0.055728  0.061314  0.000000  0.062994  0.074125  0.062994   \n",
       "2707  0.184900  0.057831  0.063628  0.000000  0.000000  0.076923  0.000000   \n",
       "\n",
       "          7         8         9     ...      2698      2699      2700  \\\n",
       "0     0.000000  0.000000  0.000000  ...  0.145479  0.000000  0.072739   \n",
       "1     0.000000  0.046625  0.000000  ...  0.045502  0.000000  0.045502   \n",
       "2     0.000000  0.153897  0.000000  ...  0.100125  0.052632  0.000000   \n",
       "3     0.116642  0.000000  0.000000  ...  0.047619  0.000000  0.000000   \n",
       "4     0.062994  0.105409  0.000000  ...  0.051434  0.054074  0.102869   \n",
       "...        ...       ...       ...  ...       ...       ...       ...   \n",
       "2703  0.055728  0.046625  0.120386  ...  0.136505  0.047836  0.045502   \n",
       "2704  0.000000  0.000000  0.000000  ...  0.145479  0.000000  0.145479   \n",
       "2705  0.000000  0.052705  0.000000  ...  0.000000  0.000000  0.051434   \n",
       "2706  0.000000  0.119523  0.000000  ...  0.174964  0.000000  0.058321   \n",
       "2707  0.074125  0.000000  0.000000  ...  0.121046  0.063628  0.000000   \n",
       "\n",
       "          2701      2702      2703      2704      2705      2706      2707  \n",
       "0     0.152944  0.000000  0.208514  0.111111  0.078567  0.178174  0.184900  \n",
       "1     0.191346  0.098295  0.043478  0.000000  0.098295  0.055728  0.057831  \n",
       "2     0.105263  0.000000  0.047836  0.000000  0.108148  0.061314  0.063628  \n",
       "3     0.000000  0.051434  0.091003  0.000000  0.000000  0.000000  0.000000  \n",
       "4     0.054074  0.111111  0.098295  0.000000  0.111111  0.062994  0.000000  \n",
       "...        ...       ...       ...       ...       ...       ...       ...  \n",
       "2703  0.095673  0.000000  1.000000  0.000000  0.049147  0.111456  0.115663  \n",
       "2704  0.076472  0.078567  0.000000  1.000000  0.000000  0.089087  0.000000  \n",
       "2705  0.054074  0.000000  0.049147  0.000000  1.000000  0.000000  0.065372  \n",
       "2706  0.122628  0.062994  0.111456  0.089087  0.000000  1.000000  0.148250  \n",
       "2707  0.127257  0.196116  0.115663  0.000000  0.065372  0.148250  1.000000  \n",
       "\n",
       "[2708 rows x 2708 columns]"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from sklearn.metrics.pairwise import cosine_similarity\n",
    "# pairwise cosine similarity between all node feature rows (2708 x 2708)\n",
    "m = cosine_similarity(data.x)\n",
    "pd.DataFrame(m)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "id": "northern-narrow",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([3, 4, 4,  ..., 3, 3, 3])"
      ]
     },
     "execution_count": 53,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# node label tensor, one class id per paper\n",
    "data.y"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "handled-baltimore",
   "metadata": {},
   "outputs": [],
   "source": [
    "# ------------------------------------------------------------------\n",
    "# 1) For every node of class 6, take its `caiyang` most feature-similar\n",
    "#    nodes and count how many share its label.\n",
    "# 2) Train a from-scratch two-layer GCN on Cora.\n",
    "# ------------------------------------------------------------------\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "import torch.optim as optim\n",
    "from torch_geometric.nn import MessagePassing\n",
    "from torch_geometric.utils import add_self_loops, degree\n",
    "\n",
    "\n",
    "def getmaxIndex(sim_row, k):\n",
    "    \"\"\"Return the indices of the k largest entries of sim_row.\n",
    "\n",
    "    BUGFIX: this helper was called but never defined (NameError on a\n",
    "    fresh run); presumably a top-k-by-similarity lookup was intended.\n",
    "    \"\"\"\n",
    "    return np.argsort(sim_row)[-k:]\n",
    "\n",
    "\n",
    "m = cosine_similarity(data.x)\n",
    "caiyang = 300  # number of most-similar nodes sampled per node\n",
    "tmp = []\n",
    "\n",
    "for i in range(len(data.y)):\n",
    "    if data.y[i] == 6:\n",
    "        a = getmaxIndex(m[i], caiyang)\n",
    "        # BUGFIX: compare against node i's own label (was data.y[0],\n",
    "        # which is node 0's label regardless of i)\n",
    "        datapd = pd.DataFrame(data.y[a] == data.y[i]).value_counts()\n",
    "        simnum = int(datapd[True]) if True in datapd else 0\n",
    "        diftnum = int(datapd[False]) if False in datapd else 0\n",
    "        tmp.append([simnum, diftnum])\n",
    "\n",
    "# single DataFrame build (was assigned twice); the bare tt[...].sum()\n",
    "# expressions produced no output mid-cell, so print the totals instead\n",
    "tt = pd.DataFrame(tmp)\n",
    "print('same-label:', tt[0].sum(), 'different-label:', tt[1].sum())\n",
    "\n",
    "\n",
    "class GCNConv(MessagePassing):\n",
    "    \"\"\"From-scratch GCN layer: aggregates normalised neighbour features.\"\"\"\n",
    "\n",
    "    def __init__(self, input_dim, output_dim):\n",
    "        super(GCNConv, self).__init__(aggr='add')\n",
    "        self.fc = nn.Linear(input_dim, output_dim)\n",
    "\n",
    "    def forward(self, x, edge_index):\n",
    "        # add self loops so every node also aggregates its own features\n",
    "        edge_index, _ = add_self_loops(\n",
    "            edge_index=edge_index, num_nodes=x.shape[0])\n",
    "\n",
    "        x = self.fc(x)\n",
    "\n",
    "        # symmetric normalisation 1/sqrt(deg_i * deg_j) per edge\n",
    "        row, col = edge_index\n",
    "        deg = degree(index=col, dtype=x.dtype)\n",
    "        deg_inv_sqrt = deg.pow(-0.5)\n",
    "        norm = deg_inv_sqrt[row] * deg_inv_sqrt[col]\n",
    "\n",
    "        return self.propagate(edge_index, x=x, norm=norm)\n",
    "\n",
    "    def message(self, x_j, norm):\n",
    "        # scale each incoming message by its edge normalisation factor\n",
    "        return norm.view(-1, 1) * x_j\n",
    "\n",
    "    def update(self, aggr_out):\n",
    "        return aggr_out\n",
    "\n",
    "\n",
    "class Net(nn.Module):\n",
    "    \"\"\"Two-layer GCN for Cora node classification (returns logits).\"\"\"\n",
    "\n",
    "    def __init__(self):\n",
    "        super(Net, self).__init__()\n",
    "        self.conv1 = GCNConv(dataset.num_node_features, 32)\n",
    "        self.conv2 = GCNConv(32, dataset.num_classes)\n",
    "\n",
    "    def forward(self, data):\n",
    "        x, edge_index = data.x, data.edge_index\n",
    "        x = self.conv1(x, edge_index)\n",
    "        x = F.relu(x)\n",
    "        x = F.dropout(x, training=self.training)\n",
    "        x = self.conv2(x, edge_index)\n",
    "        # BUGFIX: return raw logits -- nn.CrossEntropyLoss applies\n",
    "        # log-softmax internally, so the extra F.softmax here meant a\n",
    "        # double softmax that degrades training\n",
    "        return x\n",
    "\n",
    "\n",
    "model = Net()\n",
    "criterion = nn.CrossEntropyLoss()\n",
    "optimizer = optim.Adam(model.parameters(), lr=0.01, weight_decay=5e-4)\n",
    "\n",
    "model.train()\n",
    "for epoch in range(50):\n",
    "    out = model(data)\n",
    "    loss = criterion(out[data.train_mask], data.y[data.train_mask])\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "    loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    _, pred = torch.max(out[data.train_mask], dim=1)\n",
    "    correct = (pred == data.y[data.train_mask]).sum().item()\n",
    "    acc = correct / data.train_mask.sum().item()\n",
    "\n",
    "    print('Epoch {:03d} train_loss: {:.4f} train_acc: {:.4f}'.format(\n",
    "        epoch, loss.item(), acc))\n",
    "\n",
    "model.eval()\n",
    "out = model(data)\n",
    "loss = criterion(out[data.test_mask], data.y[data.test_mask])\n",
    "_, pred = torch.max(out[data.test_mask], dim=1)\n",
    "correct = (pred == data.y[data.test_mask]).sum().item()\n",
    "acc = correct / data.test_mask.sum().item()\n",
    "print(\"test_loss: {:.4f} test_acc: {:.4f}\".format(loss.item(), acc))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
