{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "97cdbbc1",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/data/home/sczc619/run/LML/anaconda3/envs/tsne/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import pickle\n",
    "import os\n",
    "import random as rd\n",
    "import numpy as np\n",
    "import copy\n",
    "import copy as cp\n",
    "import dgl\n",
    "from collections import defaultdict\n",
    "import time\n",
    "import scipy.sparse as sp\n",
    "from scipy.io import loadmat\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import torch.nn as nn\n",
    "from torch.nn import TransformerEncoder, TransformerEncoderLayer\n",
    "from torch_geometric.utils import to_dense_adj, subgraph\n",
    "from torch.utils.tensorboard import SummaryWriter\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.manifold import TSNE\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import f1_score, accuracy_score, recall_score, roc_auc_score, average_precision_score, \\\n",
    "    confusion_matrix\n",
    "\n",
    "# Adjacency-list pickle filename for each (dataset, relation) key.\n",
    "filelist = {\n",
    "    'amz_upu': 'amz_upu_adjlists.pickle',\n",
    "    'amz_usu': 'amz_usu_adjlists.pickle',\n",
    "    'amz_uvu': 'amz_uvu_adjlists.pickle',\n",
    "    'yelp_rsr': 'yelp_rsr_adjlists.pickle',\n",
    "    'yelp_rtr': 'yelp_rtr_adjlists.pickle',\n",
    "    'yelp_rur': 'yelp_rur_adjlists.pickle'\n",
    "}\n",
    "\n",
    "# Filename prefix of the per-hop decomposition-matrix pickles for each\n",
    "# relation; load_data appends '<i>.pkl' (i = 1..k) to these prefixes.\n",
    "file_matrix_prefix = {\n",
    "    'amz_upu': 'amazon_upu_matrix_',\n",
    "    'amz_usu': 'amazon_usu_matrix_',\n",
    "    'amz_uvu': 'amazon_uvu_matrix_',\n",
    "    'yelp_rsr': 'yelpnet_rsr_matrix_decompision_',\n",
    "    'yelp_rtr': 'yelpnet_rtr_matrix_decompision_',\n",
    "    'yelp_rur': 'yelpnet_rur_matrix_decompision_'\n",
    "}\n",
    "\n",
    "\n",
    "def create_node_subgraph(node_idx, feat_data, edge_indexs, device):\n",
    "    \"\"\"\n",
    "    为单个节点创建一阶邻居子图（所有邻居）\n",
    "    \"\"\"\n",
    "    neighbors = set()\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        # 找出以中心点为起点的边的终点\n",
    "        rel_neighbors = edge_index[1][edge_index[0] == node_idx].tolist()\n",
    "        neighbors.update(rel_neighbors)\n",
    "\n",
    "    # 移除中心节点自身\n",
    "    neighbors.discard(node_idx)\n",
    "    neighbors = list(neighbors)\n",
    "\n",
    "    # 如果邻居太多，进行随机采样截取\n",
    "    sample_size = 399\n",
    "    if len(neighbors) > sample_size:\n",
    "        neighbors = np.random.choice(neighbors, size=sample_size, replace=False).tolist()\n",
    "\n",
    "    # 构建子图节点列表，确保中心节点是第一个\n",
    "    sub_nodes = [node_idx] + [n for n in neighbors if n != node_idx]\n",
    "\n",
    "    # 构建子图边列表\n",
    "    sub_edge_index = []\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        mask = np.isin(edge_index[0], sub_nodes) & np.isin(edge_index[1], sub_nodes)\n",
    "        local_edges = edge_index[:, mask]\n",
    "\n",
    "        # 创建节点映射\n",
    "        node_map = {n: i for i, n in enumerate(sub_nodes)}\n",
    "\n",
    "        # 将全局索引映射到局部索引\n",
    "        if len(local_edges) > 0 and local_edges.size > 0:\n",
    "            src_nodes = [node_map[src] for src in local_edges[0]]\n",
    "            dst_nodes = [node_map[dst] for dst in local_edges[1]]\n",
    "            edge_tensor = torch.tensor([src_nodes, dst_nodes], dtype=torch.long)\n",
    "        else:\n",
    "            # 添加自环确保图不为空\n",
    "            edge_tensor = torch.tensor([[0], [0]], dtype=torch.long)\n",
    "\n",
    "        sub_edge_index.append(edge_tensor.to(device))\n",
    "\n",
    "    # 创建子图数据\n",
    "    subgraph = {\n",
    "        'features': feat_data[sub_nodes].clone(),\n",
    "        'edges': sub_edge_index,\n",
    "        'global_idx': sub_nodes\n",
    "    }\n",
    "\n",
    "    return subgraph\n",
    "\n",
    "\n",
    "def dict_to_edge_index(edge_dict):\n",
    "    source_nodes = []\n",
    "    target_nodes = []\n",
    "    for src, targets in edge_dict.items():\n",
    "        for target in targets:\n",
    "            source_nodes.append(src)\n",
    "            target_nodes.append(target)\n",
    "    edge_index = [source_nodes, target_nodes]\n",
    "    return torch.LongTensor(edge_index)\n",
    "\n",
    "\n",
    "def numpy_array_to_edge_index(np_array):\n",
    "    assert np_array.ndim == 2 and np_array.shape[0] == np_array.shape[1], \"Input must be a square matrix.\"\n",
    "    rows, cols = np.nonzero(np_array)\n",
    "    edge_index = np.vstack((rows, cols))\n",
    "    edge_index_tensor = torch.from_numpy(edge_index).long()\n",
    "    return edge_index_tensor\n",
    "\n",
    "\n",
    "def _load_relation(adj_path, matrix_pre, k):\n",
    "    \"\"\"\n",
    "    Load one relation: its adjacency-list pickle plus its k decomposition\n",
    "    matrices, returning [edge_index_tensor, [tree_edge_index, ...]].\n",
    "    \"\"\"\n",
    "    with open(adj_path, 'rb') as file:\n",
    "        relation = pickle.load(file)\n",
    "    relation = dict_to_edge_index(relation)\n",
    "    trees = []\n",
    "    for i in range(1, k + 1):\n",
    "        file_name = '{}{}.pkl'.format(matrix_pre, i)\n",
    "        with open(file_name, 'rb') as file:\n",
    "            tree = pickle.load(file)\n",
    "        trees.append(numpy_array_to_edge_index(tree))\n",
    "    return [relation, trees]\n",
    "\n",
    "\n",
    "def load_data(data, k=2, prefix=''):\n",
    "    \"\"\"\n",
    "    Load a fraud-detection dataset and its relation graphs.\n",
    "\n",
    "    :param data: dataset name, 'yelp' or 'amazon'\n",
    "    :param k: number of decomposition-matrix files to load per relation\n",
    "    :param prefix: directory containing the .mat and pickle files\n",
    "    :return: ([[rel_edge_index, rel_trees], ...] in the original relation\n",
    "             order, feature matrix, label vector), or None for an unknown\n",
    "             dataset name (matching the original implicit None).\n",
    "    \"\"\"\n",
    "    pickle_file = {}\n",
    "    matrix_prefix = {}\n",
    "    for key in filelist:\n",
    "        pickle_file[key] = os.path.join(prefix, filelist[key])\n",
    "        matrix_prefix[key] = os.path.join(prefix, file_matrix_prefix[key])\n",
    "\n",
    "    # Relation order matters downstream: keep the original ordering.\n",
    "    if data == 'yelp':\n",
    "        mat_name = 'YelpChi.mat'\n",
    "        rel_keys = ['yelp_rur', 'yelp_rtr', 'yelp_rsr']\n",
    "    elif data == 'amazon':\n",
    "        mat_name = 'Amazon.mat'\n",
    "        rel_keys = ['amz_upu', 'amz_usu', 'amz_uvu']\n",
    "    else:\n",
    "        return None\n",
    "\n",
    "    data_file = loadmat(os.path.join(prefix, mat_name))\n",
    "    labels = data_file['label'].flatten()\n",
    "    feat_data = data_file['features'].todense().A\n",
    "    relations = [_load_relation(pickle_file[key], matrix_prefix[key], k)\n",
    "                 for key in rel_keys]\n",
    "    return relations, feat_data, labels\n",
    "\n",
    "\n",
    "def normalize(mx):\n",
    "    rowsum = np.array(mx.sum(1)) + 0.01\n",
    "    r_inv = np.power(rowsum, -1).flatten()\n",
    "    r_inv[np.isinf(r_inv)] = 0.\n",
    "    r_mat_inv = sp.diags(r_inv)\n",
    "    mx = r_mat_inv.dot(mx)\n",
    "    return mx\n",
    "\n",
    "\n",
    "def pos_neg_split(nodes, labels):\n",
    "    # 正负样本分割\n",
    "    pos_nodes = []\n",
    "    neg_nodes = cp.deepcopy(nodes)\n",
    "    aux_nodes = cp.deepcopy(nodes)\n",
    "    for idx, label in enumerate(labels):\n",
    "        if label == 1:\n",
    "            pos_nodes.append(aux_nodes[idx])\n",
    "            neg_nodes.remove(aux_nodes[idx])\n",
    "\n",
    "    return pos_nodes, neg_nodes\n",
    "\n",
    "\n",
    "def undersample(pos_nodes, neg_nodes, scale=1):\n",
    "    # 对负样本进行下采样，平衡正负样本数量\n",
    "    aux_nodes = cp.deepcopy(neg_nodes)\n",
    "    aux_nodes = rd.sample(aux_nodes, k=int(len(pos_nodes) * scale))\n",
    "    batch_nodes = pos_nodes + aux_nodes\n",
    "\n",
    "    return batch_nodes\n",
    "\n",
    "\n",
    "def calculate_g_mean(y_true, y_pred):\n",
    "    \"\"\"Geometric mean of the per-class recalls (sensitivities), from the confusion matrix.\"\"\"\n",
    "    cm = confusion_matrix(y_true, y_pred)\n",
    "    recalls = []\n",
    "    for class_idx, row in enumerate(cm):\n",
    "        true_pos = row[class_idx]\n",
    "        support = row.sum()  # TP + FN for this class\n",
    "        recalls.append(true_pos / support if support != 0 else 0)\n",
    "    return np.prod(recalls) ** (1 / len(recalls))\n",
    "\n",
    "\n",
    "def iterate_batches(indices, batch_size, shuffle=True):\n",
    "    \"\"\"\n",
    "        将索引列表划分为指定大小的批次\n",
    "        :param indices: 样本索引列表\n",
    "        :param batch_size: 每个批次的大小\n",
    "        :param shuffle: 是否打乱顺序\n",
    "        :return: 生成批次索引的迭代器\n",
    "    \"\"\"\n",
    "    if shuffle:\n",
    "        rd.shuffle(indices)\n",
    "    for i in range(0, len(indices), batch_size):\n",
    "        yield indices[i:i + batch_size]\n",
    "\n",
    "\n",
    "def test(idx_eval, y_eval, model, feat_data, edge_indexs, device, batch_size=64):\n",
    "    \"\"\"\n",
    "    Evaluate `model` on the nodes in idx_eval and report metrics.\n",
    "\n",
    "    :param idx_eval: node indices to evaluate\n",
    "    :param y_eval: ground-truth labels, positionally aligned with idx_eval\n",
    "    :param model: classifier returning (logits [B, 2], features)\n",
    "    :param feat_data: full node-feature tensor\n",
    "    :param edge_indexs: per-relation edge-index structures\n",
    "    :param device: device subgraph tensors are placed on\n",
    "    :param batch_size: evaluation batch size\n",
    "    :return: (AUC, average precision, macro-F1, G-mean)\n",
    "\n",
    "    Bug fix: the original ignored `y_eval` and read a global `labels`\n",
    "    that is not defined in this file; labels now come from the argument.\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    # Node id -> label (assumes y_eval aligns with idx_eval; TODO confirm).\n",
    "    label_map = {idx: y for idx, y in zip(idx_eval, y_eval)}\n",
    "    all_probs = []\n",
    "    all_labels = []\n",
    "\n",
    "    # Batched evaluation (no shuffling, so order is deterministic).\n",
    "    for batch_centers in iterate_batches(idx_eval, batch_size, shuffle=False):\n",
    "        # Build a 1-hop subgraph for each centre node.\n",
    "        subgraph_data = [create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "                         for xi in batch_centers]\n",
    "\n",
    "        # Centre-node predictions.\n",
    "        with torch.no_grad():\n",
    "            center_logits, _ = model(subgraph_data)  # [B, 2]\n",
    "            probs = torch.softmax(center_logits, dim=-1)[:, 1]  # positive-class probability\n",
    "            all_probs.extend(probs.cpu().numpy())\n",
    "            all_labels.extend(label_map[xi] for xi in batch_centers)\n",
    "\n",
    "    # Metrics.\n",
    "    auc_score = roc_auc_score(all_labels, all_probs)\n",
    "    ap_score = average_precision_score(all_labels, all_probs)\n",
    "    pred_labels = (np.array(all_probs) >= 0.5).astype(int)\n",
    "    f1 = f1_score(all_labels, pred_labels, average='macro')\n",
    "    g_mean = calculate_g_mean(all_labels, pred_labels)\n",
    "\n",
    "    return auc_score, ap_score, f1, g_mean\n",
    "\n",
    "\n",
    "class BSNE_Transformer(nn.Module):\n",
    "    \"\"\"\n",
    "    Multi-relation Transformer classifier over per-node subgraphs.\n",
    "\n",
    "    One TransformerEncoder per relation attends over a subgraph's nodes,\n",
    "    restricted to that relation's edges via an attention mask; the\n",
    "    per-relation embeddings of the centre node (position 0) are\n",
    "    concatenated and classified into `out_feat` classes.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, in_feat, out_feat, relation_nums=3, d_model=256,\n",
    "                 nhead=8, num_layers=3, dim_feedforward=256,\n",
    "                 drop_rate=0.5):\n",
    "        super().__init__()\n",
    "        self.relation_nums = relation_nums\n",
    "        self.d_model = d_model\n",
    "        self.nhead = nhead\n",
    "\n",
    "        # Project raw node features into the model dimension.\n",
    "        self.feature_proj = nn.Sequential(\n",
    "            nn.Linear(in_feat, d_model),\n",
    "            nn.LayerNorm(d_model),\n",
    "            nn.ReLU()\n",
    "        )\n",
    "        self.norm = nn.LayerNorm(d_model)\n",
    "\n",
    "        # One independent Transformer encoder per relation type.\n",
    "        self.relation_encoders = nn.ModuleList([\n",
    "            TransformerEncoder(\n",
    "                TransformerEncoderLayer(\n",
    "                    d_model=d_model,\n",
    "                    nhead=nhead,\n",
    "                    dim_feedforward=dim_feedforward,\n",
    "                    dropout=drop_rate,\n",
    "                    batch_first=True\n",
    "                ),\n",
    "                num_layers=num_layers\n",
    "            ) for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        # Classify the concatenated per-relation centre-node embeddings.\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(relation_nums * d_model, 512),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(drop_rate),\n",
    "            nn.Linear(512, out_feat)\n",
    "        )\n",
    "        # Xavier-initialise every weight matrix (1-D params are skipped).\n",
    "        for p in self.parameters():\n",
    "            if p.dim() > 1:\n",
    "                nn.init.xavier_uniform_(p)\n",
    "\n",
    "    # NOTE(review): dead commented-out module kept from the original.\n",
    "    #         self.node_feature_extractor = nn.Sequential(\n",
    "    #             nn.Linear(relation_nums * d_model, d_model),\n",
    "    #             nn.ReLU(),\n",
    "    #             nn.LayerNorm(d_model)\n",
    "    #         )\n",
    "\n",
    "    def forward(self, subgraph_batch):\n",
    "        \"\"\"\n",
    "        Run the model over a list of subgraph dicts (see\n",
    "        create_node_subgraph: keys 'features' and 'edges').\n",
    "\n",
    "        :param subgraph_batch: list of per-centre-node subgraphs\n",
    "        :return: (center_logits [B, out_feat] log-softmax scores,\n",
    "                  center_features [B, relation_nums * d_model])\n",
    "        \"\"\"\n",
    "        # Subgraphs have varying node counts, so process them one by one.\n",
    "        center_logits_list = []\n",
    "        center_features_list = []\n",
    "\n",
    "        for sg in subgraph_batch:\n",
    "            # Process a single subgraph.\n",
    "            features = self.feature_proj(sg['features'].unsqueeze(0))  # [1, num_nodes, d_model]\n",
    "            features = self.norm(features)\n",
    "\n",
    "            # Encode the subgraph once per relation type.\n",
    "            rel_outputs = []\n",
    "            num_nodes = features.size(1)\n",
    "\n",
    "            for rel_idx in range(self.relation_nums):\n",
    "                # Dense adjacency matrix for the current relation.\n",
    "                edge_index = sg['edges'][rel_idx]\n",
    "                adj = torch.zeros(num_nodes, num_nodes,\n",
    "                                  dtype=torch.float, device=features.device)\n",
    "\n",
    "                if edge_index.size(1) > 0:\n",
    "                    src, dst = edge_index\n",
    "                    adj[src, dst] = 1.0\n",
    "\n",
    "                # Add self-loops so every node attends to itself\n",
    "                # (also keeps each mask row from being all -inf).\n",
    "                adj[range(num_nodes), range(num_nodes)] = 1.0\n",
    "\n",
    "                # Attention mask: 0 where an edge exists, -inf elsewhere.\n",
    "                adj_mask = adj.masked_fill(adj == 0.0, float('-inf'))\n",
    "                adj_mask = adj_mask.masked_fill(adj == 1.0, 0.0)\n",
    "\n",
    "                # Expand to one mask per attention head.\n",
    "                adj_mask = adj_mask.unsqueeze(0).unsqueeze(0)  # [1, 1, num_nodes, num_nodes]\n",
    "                adj_mask = adj_mask.expand(1, self.nhead, num_nodes, num_nodes)\n",
    "                adj_mask = adj_mask.reshape(-1, num_nodes, num_nodes)\n",
    "\n",
    "                # Relation-specific encoding.\n",
    "                encoder_output = self.relation_encoders[rel_idx](\n",
    "                    src=features,\n",
    "                    mask=adj_mask\n",
    "                )\n",
    "                rel_outputs.append(encoder_output)\n",
    "\n",
    "            # Concatenate the per-relation node embeddings.\n",
    "            combined = torch.cat(rel_outputs, dim=-1)  # [1, num_nodes, rel*d_model]\n",
    "\n",
    "            # The centre node is always the first node of the subgraph.\n",
    "            center_features = combined[:, 0, :]  # [1, rel*d_model]\n",
    "            center_logits = self.classifier(center_features)  # [1, out_feat]\n",
    "            center_logits = F.log_softmax(center_logits, dim=-1)\n",
    "\n",
    "            center_logits_list.append(center_logits)\n",
    "            center_features_list.append(center_features)\n",
    "\n",
    "        # Stack the per-subgraph results into a batch.\n",
    "        center_logits = torch.cat(center_logits_list, dim=0)\n",
    "        center_features = torch.cat(center_features_list, dim=0)\n",
    "\n",
    "        return center_logits, center_features\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "81c3027f",
   "metadata": {},
   "outputs": [],
   "source": [
    "   "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "50a6d575",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "\n",
      "=== Pretraining Epoch 0 (Center Node: 268) ===\n",
      "Step 0: local_loss: 15.389788627624512\n",
      "Step 1: local_loss: 13.527655601501465\n",
      "Step 2: local_loss: 13.780929565429688\n",
      "Step 3: local_loss: 12.211511611938477\n",
      "Step 4: local_loss: 11.396523475646973\n",
      "Step 5: local_loss: 13.192442893981934\n",
      "Step 6: local_loss: 13.487178802490234\n",
      "Step 7: local_loss: 14.252824783325195\n",
      "Step 8: local_loss: 12.561969757080078\n",
      "Step 9: local_loss: 15.55687141418457\n",
      "Step 10: local_loss: 13.742478370666504\n",
      "Step 11: local_loss: 14.827364921569824\n",
      "Step 12: local_loss: 15.468354225158691\n",
      "Step 13: local_loss: 15.83920955657959\n",
      "Step 14: local_loss: 19.346933364868164\n",
      "Step 15: local_loss: 12.937973022460938\n",
      "Step 16: local_loss: 13.31355094909668\n",
      "Step 17: local_loss: 12.166818618774414\n",
      "Step 18: local_loss: 13.225606918334961\n",
      "Step 19: local_loss: 9.533150672912598\n",
      "Step 20: local_loss: 12.087297439575195\n",
      "Step 21: local_loss: 15.74498176574707\n",
      "Step 22: local_loss: 12.316973686218262\n",
      "Step 23: local_loss: 14.303848266601562\n",
      "Step 24: local_loss: 15.12292194366455\n",
      "Step 25: local_loss: 17.165727615356445\n",
      "Step 26: local_loss: 13.159536361694336\n",
      "Step 27: local_loss: 13.039207458496094\n",
      "Step 28: local_loss: 10.939168930053711\n",
      "Step 29: local_loss: 16.71961784362793\n",
      "Step 30: local_loss: 13.858733177185059\n",
      "Step 31: local_loss: 12.41373348236084\n",
      "Step 32: local_loss: 14.289628028869629\n",
      "Step 33: local_loss: 11.042997360229492\n",
      "Step 34: local_loss: 15.820192337036133\n",
      "Step 35: local_loss: 13.537637710571289\n",
      "Step 36: local_loss: 13.035470008850098\n",
      "Step 37: local_loss: 9.444269180297852\n",
      "Step 38: local_loss: 12.743364334106445\n",
      "Step 39: local_loss: 12.491840362548828\n",
      "Step 40: local_loss: 13.95521354675293\n",
      "Step 41: local_loss: 13.603137016296387\n",
      "Step 42: local_loss: 13.685769081115723\n",
      "Step 43: local_loss: 14.266730308532715\n",
      "Step 44: local_loss: 15.338293075561523\n",
      "Step 45: local_loss: 12.039376258850098\n",
      "Step 46: local_loss: 13.800716400146484\n",
      "Step 47: local_loss: 14.04166316986084\n",
      "Step 48: local_loss: 14.879657745361328\n",
      "Step 49: local_loss: 11.56650447845459\n",
      "Step 50: local_loss: 16.971675872802734\n",
      "Step 51: local_loss: 14.01572036743164\n",
      "Step 52: local_loss: 13.901754379272461\n",
      "Step 53: local_loss: 12.882471084594727\n",
      "Step 54: local_loss: 14.888055801391602\n",
      "Step 55: local_loss: 12.0327787399292\n",
      "Step 56: local_loss: 11.188176155090332\n",
      "Step 57: local_loss: 10.452224731445312\n",
      "Step 58: local_loss: 10.114659309387207\n",
      "Step 59: local_loss: 16.319955825805664\n",
      "Step 60: local_loss: 13.508451461791992\n",
      "Step 61: local_loss: 9.245992660522461\n",
      "Step 62: local_loss: 14.867363929748535\n",
      "Step 63: local_loss: 16.693248748779297\n",
      "Step 64: local_loss: 11.398119926452637\n",
      "Step 65: local_loss: 12.122998237609863\n",
      "Step 66: local_loss: 12.77456283569336\n",
      "Step 67: local_loss: 14.840713500976562\n",
      "Step 68: local_loss: 13.4406156539917\n",
      "Step 69: local_loss: 10.114638328552246\n",
      "Step 70: local_loss: 15.128747940063477\n",
      "Step 71: local_loss: 12.242986679077148\n",
      "Step 72: local_loss: 14.763468742370605\n",
      "Step 73: local_loss: 10.729154586791992\n",
      "Step 74: local_loss: 7.629526615142822\n",
      "Step 75: local_loss: 14.471397399902344\n",
      "Step 76: local_loss: 9.116138458251953\n",
      "Step 77: local_loss: 12.083900451660156\n",
      "Step 78: local_loss: 12.005376815795898\n",
      "Step 79: local_loss: 11.154924392700195\n",
      "Step 80: local_loss: 11.150753021240234\n",
      "Step 81: local_loss: 11.627786636352539\n",
      "Step 82: local_loss: 12.867989540100098\n",
      "Step 83: local_loss: 7.123359680175781\n",
      "Step 84: local_loss: 15.141548156738281\n",
      "Step 85: local_loss: 10.695117950439453\n",
      "Step 86: local_loss: 13.745016098022461\n",
      "Step 87: local_loss: 15.42508602142334\n",
      "Step 88: local_loss: 11.554412841796875\n",
      "Step 89: local_loss: 11.781108856201172\n",
      "Step 90: local_loss: 13.450814247131348\n",
      "Step 91: local_loss: 13.055047988891602\n",
      "Step 92: local_loss: 12.673016548156738\n",
      "Step 93: local_loss: 11.125317573547363\n",
      "Step 94: local_loss: 10.750584602355957\n",
      "Step 95: local_loss: 12.777409553527832\n",
      "Step 96: local_loss: 15.056602478027344\n",
      "Step 97: local_loss: 14.114262580871582\n",
      "Step 98: local_loss: 10.300366401672363\n",
      "Step 99: local_loss: 11.891694068908691\n",
      "Step 100: local_loss: 14.906643867492676\n",
      "Step 101: local_loss: 13.331412315368652\n",
      "Step 102: local_loss: 10.401748657226562\n",
      "Step 103: local_loss: 10.055014610290527\n",
      "Step 104: local_loss: 12.850845336914062\n",
      "Step 105: local_loss: 13.001069068908691\n",
      "Step 106: local_loss: 8.79987621307373\n",
      "Step 107: local_loss: 11.129769325256348\n",
      "Step 108: local_loss: 12.266962051391602\n",
      "Step 109: local_loss: 8.737648010253906\n",
      "Step 110: local_loss: 10.249791145324707\n",
      "Step 111: local_loss: 12.461725234985352\n",
      "Step 112: local_loss: 11.845773696899414\n",
      "Step 113: local_loss: 10.196917533874512\n",
      "Step 114: local_loss: 11.538375854492188\n",
      "Step 115: local_loss: 10.183525085449219\n",
      "Step 116: local_loss: 8.133651733398438\n",
      "Step 117: local_loss: 9.399641036987305\n",
      "Step 118: local_loss: 13.868931770324707\n",
      "Step 119: local_loss: 9.73554801940918\n",
      "Step 120: local_loss: 9.401300430297852\n",
      "Step 121: local_loss: 16.01630210876465\n",
      "Step 122: local_loss: 12.55202579498291\n",
      "Step 123: local_loss: 8.989498138427734\n",
      "Step 124: local_loss: 9.883646965026855\n",
      "Step 125: local_loss: 10.455057144165039\n",
      "Step 126: local_loss: 14.180952072143555\n",
      "Step 127: local_loss: 8.153641700744629\n",
      "Step 128: local_loss: 12.343993186950684\n",
      "Step 129: local_loss: 9.296853065490723\n",
      "Step 130: local_loss: 9.804944038391113\n",
      "Step 131: local_loss: 8.153045654296875\n",
      "Step 132: local_loss: 7.585334777832031\n",
      "Step 133: local_loss: 11.472201347351074\n",
      "Step 134: local_loss: 12.380060195922852\n",
      "Step 135: local_loss: 12.45886516571045\n",
      "Step 136: local_loss: 10.704060554504395\n",
      "Step 137: local_loss: 12.174692153930664\n",
      "Step 138: local_loss: 9.58838939666748\n",
      "Step 139: local_loss: 10.281476020812988\n",
      "Step 140: local_loss: 11.285344123840332\n",
      "Step 141: local_loss: 7.676077842712402\n",
      "Step 142: local_loss: 10.965775489807129\n",
      "Step 143: local_loss: 11.368417739868164\n",
      "Step 144: local_loss: 5.79451847076416\n",
      "Step 145: local_loss: 7.206191539764404\n",
      "Step 146: local_loss: 7.111139297485352\n",
      "Step 147: local_loss: 6.703813076019287\n",
      "Step 148: local_loss: 9.565845489501953\n",
      "Step 149: local_loss: 8.7125883102417\n",
      "Step 150: local_loss: 8.360101699829102\n",
      "Step 151: local_loss: 11.054618835449219\n",
      "Step 152: local_loss: 7.5860161781311035\n",
      "Step 153: local_loss: 6.202181816101074\n",
      "Step 154: local_loss: 8.603206634521484\n",
      "Step 155: local_loss: 13.26846981048584\n",
      "Step 156: local_loss: 8.09220027923584\n",
      "Step 157: local_loss: 9.114211082458496\n",
      "Step 158: local_loss: 7.992490768432617\n",
      "Step 159: local_loss: 8.820581436157227\n",
      "Step 160: local_loss: 13.906732559204102\n",
      "Step 161: local_loss: 8.530475616455078\n",
      "Step 162: local_loss: 8.247960090637207\n",
      "Step 163: local_loss: 10.069825172424316\n",
      "Step 164: local_loss: 10.484012603759766\n",
      "Step 165: local_loss: 5.951456546783447\n",
      "Step 166: local_loss: 8.150935173034668\n",
      "Step 167: local_loss: 11.722299575805664\n",
      "Step 168: local_loss: 10.140947341918945\n",
      "Step 169: local_loss: 10.150257110595703\n",
      "Step 170: local_loss: 10.185571670532227\n",
      "Step 171: local_loss: 7.635275840759277\n",
      "Step 172: local_loss: 8.092243194580078\n",
      "Step 173: local_loss: 6.7622551918029785\n",
      "Step 174: local_loss: 8.401798248291016\n",
      "Step 175: local_loss: 8.442924499511719\n",
      "Step 176: local_loss: 7.125865936279297\n",
      "Step 177: local_loss: 8.361289978027344\n",
      "Step 178: local_loss: 9.468679428100586\n",
      "Step 179: local_loss: 7.67263650894165\n",
      "Step 180: local_loss: 6.229065418243408\n",
      "Step 181: local_loss: 7.865260124206543\n",
      "Step 182: local_loss: 8.392398834228516\n",
      "Step 183: local_loss: 6.387864589691162\n",
      "Step 184: local_loss: 7.278801441192627\n",
      "Step 185: local_loss: 8.26717472076416\n",
      "Step 186: local_loss: 8.37846851348877\n",
      "Step 187: local_loss: 8.973250389099121\n",
      "Step 188: local_loss: 8.725034713745117\n",
      "Step 189: local_loss: 5.5630574226379395\n",
      "Step 190: local_loss: 8.100778579711914\n",
      "Step 191: local_loss: 6.8071370124816895\n",
      "Step 192: local_loss: 7.925177097320557\n",
      "Step 193: local_loss: 9.231237411499023\n",
      "Step 194: local_loss: 8.62874984741211\n",
      "Step 195: local_loss: 7.724122047424316\n",
      "Step 196: local_loss: 8.203539848327637\n",
      "Step 197: local_loss: 6.942806243896484\n",
      "Step 198: local_loss: 8.150898933410645\n",
      "Step 199: local_loss: 9.076623916625977\n",
      "Step 200: local_loss: 9.190122604370117\n",
      "Step 201: local_loss: 6.857938766479492\n",
      "Step 202: local_loss: 8.000116348266602\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 203: local_loss: 9.79782485961914\n",
      "Step 204: local_loss: 7.167975425720215\n",
      "Step 205: local_loss: 8.980412483215332\n",
      "Step 206: local_loss: 5.432132244110107\n",
      "Step 207: local_loss: 6.644516944885254\n",
      "Step 208: local_loss: 8.846068382263184\n",
      "Step 209: local_loss: 5.155630111694336\n",
      "Step 210: local_loss: 10.978875160217285\n",
      "Step 211: local_loss: 7.823081970214844\n",
      "Step 212: local_loss: 8.180797576904297\n",
      "Step 213: local_loss: 8.020896911621094\n",
      "Step 214: local_loss: 8.178037643432617\n",
      "Step 215: local_loss: 9.242452621459961\n",
      "Step 216: local_loss: 10.11743450164795\n",
      "Step 217: local_loss: 6.249276638031006\n",
      "Step 218: local_loss: 7.271458148956299\n",
      "Step 219: local_loss: 6.433661460876465\n",
      "Step 220: local_loss: 6.121213912963867\n",
      "Step 221: local_loss: 8.42912769317627\n",
      "Step 222: local_loss: 10.044957160949707\n",
      "Step 223: local_loss: 8.850004196166992\n",
      "Step 224: local_loss: 7.4416890144348145\n",
      "Step 225: local_loss: 7.933744430541992\n",
      "Step 226: local_loss: 7.118206024169922\n",
      "Step 227: local_loss: 5.09758996963501\n",
      "Step 228: local_loss: 7.848017692565918\n",
      "Step 229: local_loss: 5.20349645614624\n",
      "Step 230: local_loss: 6.041769504547119\n",
      "Step 231: local_loss: 8.843318939208984\n",
      "Step 232: local_loss: 4.95959997177124\n",
      "Step 233: local_loss: 9.561624526977539\n",
      "Step 234: local_loss: 6.567415237426758\n",
      "Step 235: local_loss: 9.725016593933105\n",
      "Step 236: local_loss: 6.46468448638916\n",
      "Step 237: local_loss: 6.624079704284668\n",
      "Step 238: local_loss: 8.760721206665039\n",
      "Step 239: local_loss: 7.0967302322387695\n",
      "Step 240: local_loss: 6.17937707901001\n",
      "Step 247: local_loss: 5.765505790710449\n",
      "Step 248: local_loss: 7.905923366546631\n",
      "Step 249: local_loss: 5.012702465057373\n",
      "Step 250: local_loss: 7.5588579177856445\n",
      "Step 251: local_loss: 9.05591869354248\n",
      "Step 252: local_loss: 8.595251083374023\n",
      "Step 253: local_loss: 7.459292888641357\n",
      "Step 254: local_loss: 6.508424282073975\n",
      "Step 255: local_loss: 5.840958118438721\n",
      "Step 256: local_loss: 6.955386638641357\n",
      "Step 257: local_loss: 7.307884216308594\n",
      "Step 258: local_loss: 7.692023277282715\n",
      "Step 259: local_loss: 6.593968868255615\n",
      "Step 260: local_loss: 7.63075065612793\n",
      "Step 261: local_loss: 6.661625862121582\n",
      "Step 262: local_loss: 8.354382514953613\n",
      "Step 263: local_loss: 5.254425048828125\n",
      "Step 264: local_loss: 8.979470252990723\n",
      "Step 265: local_loss: 6.502706050872803\n",
      "Step 266: local_loss: 6.685823440551758\n",
      "Step 267: local_loss: 9.138081550598145\n",
      "Step 268: local_loss: 7.792379379272461\n",
      "Step 269: local_loss: 5.593474388122559\n",
      "Step 270: local_loss: 5.657456874847412\n",
      "Step 271: local_loss: 5.822527885437012\n",
      "Step 272: local_loss: 6.487843036651611\n",
      "Step 273: local_loss: 5.513852119445801\n",
      "Step 274: local_loss: 8.710288047790527\n",
      "Step 275: local_loss: 7.218064785003662\n",
      "Step 276: local_loss: 7.947928428649902\n",
      "Step 277: local_loss: 5.844522953033447\n",
      "Step 278: local_loss: 7.684442520141602\n",
      "Step 279: local_loss: 8.646692276000977\n",
      "Step 280: local_loss: 5.701810359954834\n",
      "Step 281: local_loss: 7.953218460083008\n",
      "Step 282: local_loss: 4.925082206726074\n",
      "Step 283: local_loss: 5.909459590911865\n",
      "Step 284: local_loss: 7.014555931091309\n",
      "Step 285: local_loss: 8.104016304016113\n",
      "Step 286: local_loss: 8.31533432006836\n",
      "Step 287: local_loss: 9.34168529510498\n",
      "Step 288: local_loss: 6.524867534637451\n",
      "Step 289: local_loss: 5.874789714813232\n",
      "Step 290: local_loss: 6.65615177154541\n",
      "Step 291: local_loss: 6.311588764190674\n",
      "Step 292: local_loss: 4.39345121383667\n",
      "Step 293: local_loss: 6.122105121612549\n",
      "Step 294: local_loss: 7.200045585632324\n",
      "Step 295: local_loss: 9.973509788513184\n",
      "Step 296: local_loss: 9.496163368225098\n",
      "Step 297: local_loss: 4.619294166564941\n",
      "Step 298: local_loss: 6.987522125244141\n",
      "Step 299: local_loss: 7.088019847869873\n",
      "Center node 268 finished at step 300 with loss 7.0880\n",
      "\n",
      "=== Pretraining Epoch 1 (Center Node: 7572) ===\n",
      "Step 0: local_loss: 10.633697509765625\n",
      "Step 1: local_loss: 9.515543937683105\n",
      "Step 2: local_loss: 8.469472885131836\n",
      "Step 3: local_loss: 10.525026321411133\n",
      "Step 4: local_loss: 9.230826377868652\n",
      "Step 5: local_loss: 9.617643356323242\n",
      "Step 6: local_loss: 10.813843727111816\n",
      "Step 7: local_loss: 9.870243072509766\n",
      "Step 8: local_loss: 12.046396255493164\n",
      "Step 9: local_loss: 10.081023216247559\n",
      "Step 10: local_loss: 9.425230979919434\n",
      "Step 11: local_loss: 9.304695129394531\n",
      "Step 12: local_loss: 10.485795974731445\n",
      "Step 13: local_loss: 9.653363227844238\n",
      "Step 14: local_loss: 12.505184173583984\n",
      "Step 15: local_loss: 9.963926315307617\n",
      "Step 16: local_loss: 9.387178421020508\n",
      "Step 17: local_loss: 10.560395240783691\n",
      "Step 18: local_loss: 8.945452690124512\n",
      "Step 19: local_loss: 8.891746520996094\n",
      "Step 20: local_loss: 9.603294372558594\n",
      "Step 21: local_loss: 7.967311859130859\n",
      "Step 22: local_loss: 8.344194412231445\n",
      "Step 23: local_loss: 9.230424880981445\n",
      "Step 24: local_loss: 9.037376403808594\n",
      "Step 25: local_loss: 9.245410919189453\n",
      "Step 26: local_loss: 8.140174865722656\n",
      "Step 27: local_loss: 7.440564155578613\n",
      "Step 28: local_loss: 5.887589931488037\n",
      "Step 29: local_loss: 10.025474548339844\n",
      "Step 30: local_loss: 8.979024887084961\n",
      "Step 31: local_loss: 5.831176280975342\n",
      "Step 32: local_loss: 7.637392997741699\n",
      "Step 33: local_loss: 9.521324157714844\n",
      "Step 34: local_loss: 7.426764488220215\n",
      "Step 35: local_loss: 7.5296430587768555\n",
      "Step 36: local_loss: 5.951150894165039\n",
      "Step 37: local_loss: 8.99915885925293\n",
      "Step 38: local_loss: 7.247936725616455\n",
      "Step 39: local_loss: 6.324277400970459\n",
      "Step 40: local_loss: 10.498796463012695\n",
      "Step 41: local_loss: 7.816654205322266\n",
      "Step 42: local_loss: 7.622900009155273\n",
      "Step 43: local_loss: 7.706917762756348\n",
      "Step 44: local_loss: 6.341287612915039\n",
      "Step 45: local_loss: 6.446560859680176\n",
      "Step 46: local_loss: 9.737648010253906\n",
      "Step 47: local_loss: 7.633783340454102\n",
      "Step 48: local_loss: 7.887719631195068\n",
      "Step 49: local_loss: 6.509692668914795\n",
      "Step 50: local_loss: 6.14418888092041\n",
      "Step 51: local_loss: 7.011484146118164\n",
      "Step 52: local_loss: 7.344233512878418\n",
      "Step 53: local_loss: 6.437493324279785\n",
      "Step 54: local_loss: 5.264657020568848\n",
      "Step 55: local_loss: 6.122717380523682\n",
      "Step 56: local_loss: 8.072775840759277\n",
      "Step 57: local_loss: 6.773470878601074\n",
      "Step 58: local_loss: 5.838568210601807\n",
      "Step 59: local_loss: 5.497264862060547\n",
      "Step 60: local_loss: 9.669761657714844\n",
      "Step 61: local_loss: 9.771944046020508\n",
      "Step 62: local_loss: 5.902472019195557\n",
      "Step 63: local_loss: 6.713021278381348\n",
      "Step 64: local_loss: 8.36956787109375\n",
      "Step 65: local_loss: 5.78010368347168\n",
      "Step 66: local_loss: 6.10231876373291\n",
      "Step 67: local_loss: 6.035888195037842\n",
      "Step 68: local_loss: 7.476521968841553\n",
      "Step 69: local_loss: 8.81570053100586\n",
      "Step 70: local_loss: 7.16945219039917\n",
      "Step 71: local_loss: 6.741022109985352\n",
      "Step 72: local_loss: 7.309452533721924\n",
      "Step 73: local_loss: 6.597251892089844\n",
      "Step 74: local_loss: 5.9314961433410645\n",
      "Step 75: local_loss: 6.585286617279053\n",
      "Step 76: local_loss: 7.673285484313965\n",
      "Step 77: local_loss: 6.176242351531982\n",
      "Step 78: local_loss: 5.253941059112549\n",
      "Step 79: local_loss: 5.419806003570557\n",
      "Step 80: local_loss: 7.1707940101623535\n",
      "Step 81: local_loss: 8.356640815734863\n",
      "Step 82: local_loss: 5.462034225463867\n",
      "Step 83: local_loss: 7.742008209228516\n",
      "Step 84: local_loss: 4.554738521575928\n",
      "Step 85: local_loss: 6.277363300323486\n",
      "Step 86: local_loss: 4.517800331115723\n",
      "Step 87: local_loss: 5.432974815368652\n",
      "Step 88: local_loss: 7.374284267425537\n",
      "Step 89: local_loss: 4.263433933258057\n",
      "Step 90: local_loss: 6.5165581703186035\n",
      "Step 91: local_loss: 6.678136348724365\n",
      "Step 92: local_loss: 3.394320011138916\n",
      "Step 93: local_loss: 7.845839977264404\n",
      "Step 94: local_loss: 10.81982421875\n",
      "Step 102: local_loss: 6.533047676086426\n",
      "Step 103: local_loss: 8.567806243896484\n",
      "Step 104: local_loss: 10.078214645385742\n",
      "Step 105: local_loss: 7.845946788787842\n",
      "Step 106: local_loss: 5.796834468841553\n",
      "Step 107: local_loss: 9.04304027557373\n",
      "Step 108: local_loss: 7.920986652374268\n",
      "Step 109: local_loss: 8.779352188110352\n",
      "Step 110: local_loss: 5.934069633483887\n",
      "Step 111: local_loss: 7.225069522857666\n",
      "Step 112: local_loss: 8.193655014038086\n",
      "Step 113: local_loss: 6.955757141113281\n",
      "Step 114: local_loss: 6.083542823791504\n",
      "Step 115: local_loss: 7.071162223815918\n",
      "Step 116: local_loss: 7.608153820037842\n",
      "Step 117: local_loss: 8.897514343261719\n",
      "Step 118: local_loss: 9.59477424621582\n",
      "Step 119: local_loss: 6.909419059753418\n",
      "Step 120: local_loss: 7.302525997161865\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 121: local_loss: 7.268130302429199\n",
      "Step 122: local_loss: 6.787690162658691\n",
      "Step 123: local_loss: 8.576411247253418\n",
      "Step 124: local_loss: 6.311722278594971\n",
      "Step 125: local_loss: 8.56344223022461\n",
      "Step 126: local_loss: 7.142655849456787\n",
      "Step 127: local_loss: 5.752537250518799\n",
      "Step 128: local_loss: 6.375426769256592\n",
      "Step 129: local_loss: 5.794275283813477\n",
      "Step 130: local_loss: 8.757740020751953\n",
      "Step 131: local_loss: 8.620770454406738\n",
      "Step 132: local_loss: 6.769406318664551\n",
      "Step 133: local_loss: 5.934016227722168\n",
      "Step 134: local_loss: 6.042289733886719\n",
      "Step 135: local_loss: 8.732860565185547\n",
      "Step 136: local_loss: 6.538891792297363\n",
      "Step 137: local_loss: 8.005446434020996\n",
      "Step 138: local_loss: 6.618887901306152\n",
      "Step 139: local_loss: 6.451218128204346\n",
      "Step 140: local_loss: 7.349140644073486\n",
      "Step 141: local_loss: 5.320085048675537\n",
      "Step 142: local_loss: 7.606504917144775\n",
      "Step 143: local_loss: 7.830677509307861\n",
      "Step 144: local_loss: 7.607263565063477\n",
      "Step 145: local_loss: 8.486359596252441\n",
      "Step 146: local_loss: 9.888843536376953\n",
      "Step 147: local_loss: 4.939204692840576\n",
      "Step 148: local_loss: 4.920851707458496\n",
      "Step 149: local_loss: 7.629886627197266\n",
      "Step 150: local_loss: 6.880467414855957\n",
      "Step 151: local_loss: 8.28812313079834\n",
      "Step 152: local_loss: 4.293990612030029\n",
      "Step 153: local_loss: 4.0345892906188965\n",
      "Step 154: local_loss: 5.964492321014404\n",
      "Step 155: local_loss: 6.547665596008301\n",
      "Step 156: local_loss: 6.3386993408203125\n",
      "Step 157: local_loss: 4.504891395568848\n",
      "Step 158: local_loss: 4.452935218811035\n",
      "Step 159: local_loss: 6.994785785675049\n",
      "Step 160: local_loss: 6.429827690124512\n",
      "Step 161: local_loss: 4.528418064117432\n",
      "Step 162: local_loss: 6.217209339141846\n",
      "Step 163: local_loss: 3.9269394874572754\n",
      "Step 164: local_loss: 5.556158542633057\n",
      "Step 165: local_loss: 5.8149518966674805\n",
      "Step 166: local_loss: 5.49796199798584\n",
      "Step 167: local_loss: 5.13852071762085\n",
      "Step 168: local_loss: 9.168734550476074\n",
      "Step 169: local_loss: 6.132377624511719\n",
      "Step 170: local_loss: 4.435683250427246\n",
      "Step 171: local_loss: 6.872976303100586\n",
      "Step 172: local_loss: 6.0597147941589355\n",
      "Step 173: local_loss: 6.186800956726074\n",
      "Step 174: local_loss: 7.164379119873047\n",
      "Step 175: local_loss: 6.300468444824219\n",
      "Step 176: local_loss: 5.503411293029785\n",
      "Step 177: local_loss: 5.913166522979736\n",
      "Step 178: local_loss: 5.26448917388916\n",
      "Step 179: local_loss: 8.02822208404541\n",
      "Step 180: local_loss: 6.32921028137207\n",
      "Step 181: local_loss: 7.954256534576416\n",
      "Step 182: local_loss: 7.249436855316162\n",
      "Step 183: local_loss: 5.473000526428223\n",
      "Step 184: local_loss: 6.934615135192871\n",
      "Step 185: local_loss: 5.827112674713135\n",
      "Step 186: local_loss: 6.2876152992248535\n",
      "Step 187: local_loss: 6.654335975646973\n",
      "Step 188: local_loss: 5.546435832977295\n",
      "Step 189: local_loss: 5.478569030761719\n",
      "Step 190: local_loss: 6.7137532234191895\n",
      "Step 191: local_loss: 5.805417537689209\n",
      "Step 192: local_loss: 5.752767086029053\n",
      "Step 193: local_loss: 5.485021114349365\n",
      "Step 194: local_loss: 6.561911582946777\n",
      "Step 195: local_loss: 5.111690521240234\n",
      "Step 196: local_loss: 4.326573371887207\n",
      "Step 197: local_loss: 7.2465386390686035\n",
      "Step 198: local_loss: 4.863816261291504\n",
      "Step 199: local_loss: 5.389796257019043\n",
      "Step 200: local_loss: 5.134415149688721\n",
      "Step 201: local_loss: 5.618512153625488\n",
      "Step 202: local_loss: 6.027994155883789\n",
      "Step 203: local_loss: 4.449957370758057\n",
      "Step 204: local_loss: 5.875797748565674\n",
      "Step 205: local_loss: 5.009648323059082\n",
      "Step 206: local_loss: 4.351889133453369\n",
      "Step 207: local_loss: 6.853114604949951\n",
      "Step 208: local_loss: 3.6209182739257812\n",
      "Step 209: local_loss: 4.701388359069824\n",
      "Step 210: local_loss: 6.018503665924072\n",
      "Step 211: local_loss: 4.17490291595459\n",
      "Step 212: local_loss: 7.311798572540283\n",
      "Step 213: local_loss: 4.466648101806641\n",
      "Step 214: local_loss: 3.630629777908325\n",
      "Step 215: local_loss: 6.214017391204834\n",
      "Step 216: local_loss: 6.679386615753174\n",
      "Step 217: local_loss: 5.31649923324585\n",
      "Step 218: local_loss: 5.4625563621521\n",
      "Step 219: local_loss: 4.689600467681885\n",
      "Step 220: local_loss: 5.885312557220459\n",
      "Step 221: local_loss: 6.856510162353516\n",
      "Step 222: local_loss: 5.564865589141846\n",
      "Step 223: local_loss: 6.606104373931885\n",
      "Step 224: local_loss: 4.04847526550293\n",
      "Step 225: local_loss: 4.069399356842041\n",
      "Step 226: local_loss: 5.676709175109863\n",
      "Step 227: local_loss: 5.1006760597229\n",
      "Step 228: local_loss: 3.2962403297424316\n",
      "Step 229: local_loss: 4.141419887542725\n",
      "Step 230: local_loss: 4.677087306976318\n",
      "Step 231: local_loss: 4.661378383636475\n",
      "Step 232: local_loss: 5.574448108673096\n",
      "Step 233: local_loss: 5.1008620262146\n",
      "Step 234: local_loss: 4.312441349029541\n",
      "Step 235: local_loss: 3.6440608501434326\n",
      "Step 236: local_loss: 6.7239766120910645\n",
      "Step 237: local_loss: 4.868412494659424\n",
      "Step 238: local_loss: 3.962155342102051\n",
      "Step 239: local_loss: 4.889321327209473\n",
      "Step 240: local_loss: 7.539542198181152\n",
      "Step 241: local_loss: 6.351107120513916\n",
      "Step 242: local_loss: 2.7072548866271973\n",
      "Step 243: local_loss: 4.2215118408203125\n",
      "Step 244: local_loss: 3.7207415103912354\n",
      "Step 245: local_loss: 8.174921035766602\n",
      "Step 246: local_loss: 3.859715461730957\n",
      "Step 247: local_loss: 4.164190769195557\n",
      "Step 248: local_loss: 3.701653003692627\n",
      "Step 249: local_loss: 6.333852291107178\n",
      "Step 250: local_loss: 8.147111892700195\n",
      "Step 251: local_loss: 5.001198768615723\n",
      "Step 252: local_loss: 6.007127285003662\n",
      "Step 253: local_loss: 4.1095476150512695\n",
      "Step 254: local_loss: 4.58693790435791\n",
      "Step 255: local_loss: 7.284001350402832\n",
      "Step 256: local_loss: 5.523189067840576\n",
      "Step 257: local_loss: 5.508296966552734\n",
      "Step 258: local_loss: 4.737732410430908\n",
      "Step 259: local_loss: 4.522023677825928\n",
      "Step 260: local_loss: 8.30968952178955\n",
      "Step 261: local_loss: 3.974306583404541\n",
      "Step 262: local_loss: 4.109284400939941\n",
      "Step 263: local_loss: 4.459141254425049\n",
      "Step 264: local_loss: 5.070857048034668\n",
      "Step 265: local_loss: 5.794907093048096\n",
      "Step 266: local_loss: 5.900505065917969\n",
      "Step 267: local_loss: 4.519190311431885\n",
      "Step 268: local_loss: 5.396531105041504\n",
      "Step 269: local_loss: 5.622204780578613\n",
      "Step 270: local_loss: 4.735301494598389\n",
      "Step 271: local_loss: 3.7771518230438232\n",
      "Step 272: local_loss: 5.469911098480225\n",
      "Step 273: local_loss: 5.043065547943115\n",
      "Step 274: local_loss: 6.631219863891602\n",
      "Step 275: local_loss: 3.8839807510375977\n",
      "Step 276: local_loss: 4.88412618637085\n",
      "Step 277: local_loss: 5.659491062164307\n",
      "Step 278: local_loss: 5.756929874420166\n",
      "Step 279: local_loss: 21.080650329589844\n",
      "Step 280: local_loss: 4.129738807678223\n",
      "Step 281: local_loss: 3.322561264038086\n",
      "Step 282: local_loss: 3.967344045639038\n",
      "Step 283: local_loss: 5.68834924697876\n",
      "Step 284: local_loss: 6.096014022827148\n",
      "Step 285: local_loss: 3.476881980895996\n",
      "Step 286: local_loss: 7.714211463928223\n",
      "Step 287: local_loss: 5.131633281707764\n",
      "Step 288: local_loss: 3.8461432456970215\n",
      "Step 289: local_loss: 3.795151948928833\n",
      "Step 290: local_loss: 3.5748448371887207\n",
      "Step 291: local_loss: 4.692875862121582\n",
      "Step 292: local_loss: 3.392788887023926\n",
      "Step 293: local_loss: 4.825568199157715\n",
      "Step 294: local_loss: 5.595320701599121\n",
      "Step 295: local_loss: 12.537740707397461\n",
      "Step 296: local_loss: 4.480075359344482\n",
      "Step 297: local_loss: 5.89246129989624\n",
      "Step 298: local_loss: 3.340820550918579\n",
      "Step 299: local_loss: 5.087056636810303\n",
      "Center node 7572 finished at step 300 with loss 5.0871\n",
      "\n",
      "=== Pretraining Epoch 2 (Center Node: 7964) ===\n",
      "Step 0: local_loss: 7.8062543869018555\n",
      "Step 1: local_loss: 19.97447395324707\n",
      "Step 2: local_loss: 15.827482223510742\n",
      "Step 3: local_loss: 13.959787368774414\n",
      "Step 4: local_loss: 16.34794807434082\n",
      "Step 5: local_loss: 16.86018943786621\n",
      "Step 6: local_loss: 11.991954803466797\n",
      "Step 7: local_loss: 8.090066909790039\n",
      "Step 8: local_loss: 9.350085258483887\n",
      "Step 9: local_loss: 10.239350318908691\n",
      "Step 10: local_loss: 7.817941665649414\n",
      "Step 11: local_loss: 7.773823261260986\n",
      "Step 12: local_loss: 5.895897388458252\n",
      "Step 13: local_loss: 9.391702651977539\n",
      "Step 14: local_loss: 10.263444900512695\n",
      "Step 15: local_loss: 7.571326732635498\n",
      "Step 16: local_loss: 13.261809349060059\n",
      "Step 17: local_loss: 6.7984466552734375\n",
      "Step 18: local_loss: 6.74925422668457\n",
      "Step 19: local_loss: 5.929356098175049\n",
      "Step 20: local_loss: 5.592889308929443\n",
      "Step 21: local_loss: 8.328665733337402\n",
      "Step 22: local_loss: 7.0692057609558105\n",
      "Step 23: local_loss: 9.605996131896973\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 24: local_loss: 7.581935405731201\n",
      "Step 25: local_loss: 3.9667091369628906\n",
      "Step 26: local_loss: 8.067046165466309\n",
      "Step 27: local_loss: 8.746600151062012\n",
      "Step 28: local_loss: 6.732631683349609\n",
      "Step 29: local_loss: 11.570493698120117\n",
      "Step 30: local_loss: 6.768293380737305\n",
      "Step 31: local_loss: 6.055315017700195\n",
      "Step 32: local_loss: 8.622142791748047\n",
      "Step 33: local_loss: 5.4808478355407715\n",
      "Step 34: local_loss: 10.404056549072266\n",
      "Step 35: local_loss: 3.6838274002075195\n",
      "Step 36: local_loss: 7.19943380355835\n",
      "Step 37: local_loss: 5.2835283279418945\n",
      "Step 38: local_loss: 7.817764759063721\n",
      "Step 39: local_loss: 13.156954765319824\n",
      "Step 40: local_loss: 6.943287372589111\n",
      "Step 41: local_loss: 5.3834099769592285\n",
      "Step 42: local_loss: 5.658747673034668\n",
      "Step 43: local_loss: 6.085025787353516\n",
      "Step 44: local_loss: 8.10167121887207\n",
      "Step 45: local_loss: 8.907902717590332\n",
      "Step 46: local_loss: 9.335152626037598\n",
      "Step 47: local_loss: 5.684711456298828\n",
      "Step 48: local_loss: 8.32563304901123\n",
      "Step 49: local_loss: 7.046008586883545\n",
      "Step 50: local_loss: 8.909269332885742\n",
      "Step 51: local_loss: 7.210567474365234\n",
      "Step 52: local_loss: 10.989925384521484\n",
      "Step 53: local_loss: 8.245378494262695\n",
      "Step 54: local_loss: 6.7203474044799805\n",
      "Step 55: local_loss: 9.493918418884277\n",
      "Step 56: local_loss: 5.288102626800537\n",
      "Step 57: local_loss: 7.707095146179199\n",
      "Step 58: local_loss: 9.36137866973877\n",
      "Step 59: local_loss: 5.109294414520264\n",
      "Step 60: local_loss: 5.190447807312012\n",
      "Step 61: local_loss: 6.7469587326049805\n",
      "Step 62: local_loss: 2.9164977073669434\n",
      "Step 63: local_loss: 8.882181167602539\n",
      "Step 64: local_loss: 8.868743896484375\n",
      "Step 65: local_loss: 6.147892475128174\n",
      "Step 66: local_loss: 5.564706802368164\n",
      "Step 67: local_loss: 5.836848258972168\n",
      "Step 68: local_loss: 3.6276378631591797\n",
      "Step 69: local_loss: 12.821915626525879\n",
      "Step 70: local_loss: 7.768304347991943\n",
      "Step 71: local_loss: 4.765543460845947\n",
      "Step 72: local_loss: 9.046619415283203\n",
      "Step 73: local_loss: 8.402471542358398\n",
      "Step 74: local_loss: 6.071056365966797\n",
      "Step 75: local_loss: 3.990769147872925\n",
      "Step 76: local_loss: 6.51112699508667\n",
      "Step 77: local_loss: 5.357930660247803\n",
      "Step 78: local_loss: 7.739706993103027\n",
      "Step 79: local_loss: 8.811163902282715\n",
      "Step 80: local_loss: 6.184969425201416\n",
      "Step 81: local_loss: 4.263511657714844\n",
      "Step 82: local_loss: 9.122758865356445\n",
      "Step 83: local_loss: 6.143017292022705\n",
      "Step 84: local_loss: 6.659907341003418\n",
      "Step 85: local_loss: 8.196310997009277\n",
      "Step 86: local_loss: 7.291909694671631\n",
      "Step 87: local_loss: 7.7446160316467285\n",
      "Step 88: local_loss: 10.885271072387695\n",
      "Step 89: local_loss: 4.739809513092041\n",
      "Step 90: local_loss: 5.125986099243164\n",
      "Step 91: local_loss: 8.456562042236328\n",
      "Step 92: local_loss: 7.387362480163574\n",
      "Step 93: local_loss: 8.980477333068848\n",
      "Step 94: local_loss: 4.500724792480469\n",
      "Step 95: local_loss: 6.326855182647705\n",
      "Step 96: local_loss: 5.996567249298096\n",
      "Step 97: local_loss: 4.068113803863525\n",
      "Step 98: local_loss: 7.508274078369141\n",
      "Step 99: local_loss: 7.916121006011963\n",
      "Step 100: local_loss: 5.647647380828857\n",
      "Step 101: local_loss: 5.8957319259643555\n",
      "Step 102: local_loss: 8.533473014831543\n",
      "Step 103: local_loss: 5.872945308685303\n",
      "Step 104: local_loss: 6.130306243896484\n",
      "Step 105: local_loss: 3.78691029548645\n",
      "Step 106: local_loss: 5.463764667510986\n",
      "Step 107: local_loss: 8.841014862060547\n",
      "Step 108: local_loss: 3.2668204307556152\n",
      "Step 109: local_loss: 6.757760047912598\n",
      "Step 110: local_loss: 6.552928924560547\n",
      "Step 111: local_loss: 7.758728981018066\n",
      "Step 112: local_loss: 8.637036323547363\n",
      "Step 113: local_loss: 3.2816929817199707\n",
      "Step 114: local_loss: 5.076752185821533\n",
      "Step 115: local_loss: 4.542669773101807\n",
      "Step 116: local_loss: 7.562870502471924\n",
      "Step 117: local_loss: 6.651047706604004\n",
      "Step 118: local_loss: 8.071277618408203\n",
      "Step 119: local_loss: 7.966808319091797\n",
      "Step 120: local_loss: 6.347926139831543\n",
      "Step 121: local_loss: 8.985797882080078\n",
      "Step 122: local_loss: 4.869752883911133\n",
      "Step 123: local_loss: 8.8538818359375\n",
      "Step 124: local_loss: 7.351747989654541\n",
      "Step 125: local_loss: 9.571062088012695\n",
      "Step 126: local_loss: 5.6651434898376465\n",
      "Step 127: local_loss: 4.970188617706299\n",
      "Step 128: local_loss: 7.436493873596191\n",
      "Step 129: local_loss: 5.45133638381958\n",
      "Step 130: local_loss: 10.030601501464844\n",
      "Step 131: local_loss: 5.780828952789307\n",
      "Step 132: local_loss: 3.6733806133270264\n",
      "Step 133: local_loss: 7.20620059967041\n",
      "Step 134: local_loss: 7.377282619476318\n",
      "Step 135: local_loss: 7.882343769073486\n",
      "Step 136: local_loss: 11.738903999328613\n",
      "Step 137: local_loss: 5.939385890960693\n",
      "Step 138: local_loss: 8.010408401489258\n",
      "Step 139: local_loss: 5.480830192565918\n",
      "Step 140: local_loss: 6.004765510559082\n",
      "Step 141: local_loss: 5.21407413482666\n",
      "Step 142: local_loss: 6.606234550476074\n",
      "Step 143: local_loss: 10.244865417480469\n",
      "Step 144: local_loss: 6.2238054275512695\n",
      "Step 145: local_loss: 6.3988847732543945\n",
      "Step 146: local_loss: 6.8179121017456055\n",
      "Step 147: local_loss: 4.898005962371826\n",
      "Step 148: local_loss: 4.737642765045166\n",
      "Step 149: local_loss: 5.378176212310791\n",
      "Step 150: local_loss: 6.037137031555176\n",
      "Step 151: local_loss: 7.582418918609619\n",
      "Step 152: local_loss: 5.678469657897949\n",
      "Step 153: local_loss: 8.690828323364258\n",
      "Step 154: local_loss: 6.205703258514404\n",
      "Step 155: local_loss: 7.989455699920654\n",
      "Step 156: local_loss: 5.843118667602539\n",
      "Step 157: local_loss: 5.590135097503662\n",
      "Step 158: local_loss: 3.8553991317749023\n",
      "Step 159: local_loss: 6.6036810874938965\n",
      "Step 160: local_loss: 6.5354390144348145\n",
      "Step 161: local_loss: 7.64204216003418\n",
      "Step 162: local_loss: 6.264925479888916\n",
      "Step 163: local_loss: 4.935963153839111\n",
      "Step 164: local_loss: 7.091278553009033\n",
      "Step 165: local_loss: 9.335806846618652\n",
      "Step 166: local_loss: 6.560269355773926\n",
      "Step 167: local_loss: 3.8738672733306885\n",
      "Step 168: local_loss: 3.8002164363861084\n",
      "Step 169: local_loss: 8.415470123291016\n",
      "Step 170: local_loss: 5.887710094451904\n",
      "Step 171: local_loss: 12.569204330444336\n",
      "Step 172: local_loss: 3.17130184173584\n",
      "Step 173: local_loss: 5.995274066925049\n",
      "Step 174: local_loss: 7.306854724884033\n",
      "Step 175: local_loss: 7.0444207191467285\n",
      "Step 176: local_loss: 7.329307556152344\n",
      "Step 177: local_loss: 5.664040565490723\n",
      "Step 178: local_loss: 5.1712870597839355\n",
      "Step 179: local_loss: 4.772835731506348\n",
      "Step 180: local_loss: 6.5085344314575195\n",
      "Step 181: local_loss: 3.137676239013672\n",
      "Step 182: local_loss: 4.872485160827637\n",
      "Step 183: local_loss: 9.256660461425781\n",
      "Step 184: local_loss: 8.384810447692871\n",
      "Step 185: local_loss: 5.104182243347168\n",
      "Step 186: local_loss: 5.4047112464904785\n",
      "Step 187: local_loss: 5.493386268615723\n",
      "Step 188: local_loss: 8.559550285339355\n",
      "Step 189: local_loss: 6.613410949707031\n",
      "Step 190: local_loss: 9.2553071975708\n",
      "Step 191: local_loss: 5.804757595062256\n",
      "Step 192: local_loss: 4.421901702880859\n",
      "Step 193: local_loss: 8.088611602783203\n",
      "Step 194: local_loss: 8.372262954711914\n",
      "Step 195: local_loss: 6.193528175354004\n",
      "Step 196: local_loss: 7.413530349731445\n",
      "Step 197: local_loss: 6.122894287109375\n",
      "Step 198: local_loss: 7.156423091888428\n",
      "Step 199: local_loss: 6.855951309204102\n",
      "Step 200: local_loss: 8.940713882446289\n",
      "Step 201: local_loss: 6.269659042358398\n",
      "Step 202: local_loss: 5.743690490722656\n",
      "Step 203: local_loss: 5.52795934677124\n",
      "Step 204: local_loss: 9.718388557434082\n",
      "Step 205: local_loss: 8.331814765930176\n",
      "Step 206: local_loss: 5.666140556335449\n",
      "Step 207: local_loss: 5.885111331939697\n",
      "Step 208: local_loss: 7.179974555969238\n",
      "Step 209: local_loss: 4.419986248016357\n",
      "Step 210: local_loss: 7.320746898651123\n",
      "Step 211: local_loss: 7.424182891845703\n",
      "Step 212: local_loss: 6.321920394897461\n",
      "Step 213: local_loss: 4.433134078979492\n",
      "Step 214: local_loss: 8.609319686889648\n",
      "Step 215: local_loss: 6.75377893447876\n",
      "Step 216: local_loss: 7.173640251159668\n",
      "Step 217: local_loss: 3.082735776901245\n",
      "Step 218: local_loss: 6.037686347961426\n",
      "Step 219: local_loss: 5.92421817779541\n",
      "Step 220: local_loss: 6.454864025115967\n",
      "Step 221: local_loss: 3.993941307067871\n",
      "Step 222: local_loss: 6.48738431930542\n",
      "Step 223: local_loss: 8.894829750061035\n",
      "Step 224: local_loss: 7.309532165527344\n",
      "Step 225: local_loss: 5.818268299102783\n",
      "Step 226: local_loss: 6.446331024169922\n",
      "Step 227: local_loss: 5.817898750305176\n",
      "Step 228: local_loss: 7.11539363861084\n",
      "Step 229: local_loss: 6.689654350280762\n",
      "Step 230: local_loss: 5.601927757263184\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 231: local_loss: 8.408913612365723\n",
      "Step 232: local_loss: 4.462522506713867\n",
      "Step 233: local_loss: 5.022620677947998\n",
      "Step 234: local_loss: 3.0989267826080322\n",
      "Step 235: local_loss: 5.8955183029174805\n",
      "Step 236: local_loss: 7.848023414611816\n",
      "Step 237: local_loss: 4.733550071716309\n",
      "Step 238: local_loss: 6.984774112701416\n",
      "Step 239: local_loss: 6.4547224044799805\n",
      "Step 240: local_loss: 6.267223358154297\n",
      "Step 241: local_loss: 6.187167167663574\n",
      "Step 242: local_loss: 5.469841480255127\n",
      "Step 243: local_loss: 4.45660924911499\n",
      "Step 244: local_loss: 7.1103410720825195\n",
      "Step 245: local_loss: 4.109297752380371\n",
      "Step 246: local_loss: 7.69744348526001\n",
      "Step 247: local_loss: 3.836744785308838\n",
      "Step 248: local_loss: 10.125497817993164\n",
      "Step 249: local_loss: 4.417964458465576\n",
      "Step 250: local_loss: 7.990899085998535\n",
      "Step 251: local_loss: 3.8726561069488525\n",
      "Step 252: local_loss: 9.135355949401855\n",
      "Step 253: local_loss: 6.7936787605285645\n",
      "Step 254: local_loss: 11.088400840759277\n",
      "Step 255: local_loss: 5.953876495361328\n",
      "Step 256: local_loss: 8.677396774291992\n",
      "Step 257: local_loss: 9.317597389221191\n",
      "Step 258: local_loss: 8.822807312011719\n",
      "Step 259: local_loss: 4.823565483093262\n",
      "Step 260: local_loss: 5.135767936706543\n",
      "Step 261: local_loss: 4.917187690734863\n",
      "Step 262: local_loss: 6.74074649810791\n",
      "Step 263: local_loss: 4.9472270011901855\n",
      "Step 264: local_loss: 6.484796524047852\n",
      "Step 265: local_loss: 3.9497079849243164\n",
      "Step 266: local_loss: 8.047688484191895\n",
      "Step 267: local_loss: 7.799505710601807\n",
      "Step 268: local_loss: 8.12790584564209\n",
      "Step 269: local_loss: 5.028875827789307\n",
      "Step 270: local_loss: 6.974603652954102\n",
      "Step 271: local_loss: 3.1580495834350586\n",
      "Step 272: local_loss: 4.474323272705078\n",
      "Step 273: local_loss: 3.4451334476470947\n",
      "Step 274: local_loss: 6.416056156158447\n",
      "Step 275: local_loss: 10.140639305114746\n",
      "Step 276: local_loss: 3.8562893867492676\n",
      "Step 277: local_loss: 3.7923007011413574\n",
      "Step 278: local_loss: 4.491055965423584\n",
      "Step 279: local_loss: 7.434711456298828\n",
      "Step 280: local_loss: 4.561394691467285\n",
      "Step 281: local_loss: 9.84241771697998\n",
      "Step 282: local_loss: 6.778849124908447\n",
      "Step 283: local_loss: 6.3921051025390625\n",
      "Step 284: local_loss: 5.286775588989258\n",
      "Step 285: local_loss: 4.8147454261779785\n",
      "Step 286: local_loss: 5.092586994171143\n",
      "Step 287: local_loss: 7.312439918518066\n",
      "Step 288: local_loss: 5.465067386627197\n",
      "Step 289: local_loss: 5.873902797698975\n",
      "Step 290: local_loss: 7.535616397857666\n",
      "Step 291: local_loss: 5.28479528427124\n",
      "Step 292: local_loss: 8.110724449157715\n",
      "Step 293: local_loss: 7.654667854309082\n",
      "Step 294: local_loss: 3.820986032485962\n",
      "Step 295: local_loss: 6.819307804107666\n",
      "Step 296: local_loss: 6.636930465698242\n",
      "Step 297: local_loss: 7.80921745300293\n",
      "Step 298: local_loss: 6.110388278961182\n",
      "Step 299: local_loss: 7.71241569519043\n",
      "Center node 7964 finished at step 300 with loss 7.7124\n",
      "\n",
      "=== Pretraining Epoch 3 (Center Node: 7312) ===\n",
      "Step 0: local_loss: 12.418644905090332\n",
      "Step 1: local_loss: 8.67261791229248\n",
      "Step 2: local_loss: 10.320762634277344\n",
      "Step 3: local_loss: 9.799715995788574\n",
      "Step 4: local_loss: 11.47706127166748\n",
      "Step 5: local_loss: 10.807003021240234\n",
      "Step 6: local_loss: 10.00656509399414\n",
      "Step 7: local_loss: 12.451115608215332\n",
      "Step 8: local_loss: 10.6394681930542\n",
      "Step 9: local_loss: 12.835284233093262\n",
      "Step 10: local_loss: 10.724557876586914\n",
      "Step 11: local_loss: 11.318890571594238\n",
      "Step 12: local_loss: 9.324053764343262\n",
      "Step 13: local_loss: 9.973148345947266\n",
      "Step 14: local_loss: 9.596858978271484\n",
      "Step 15: local_loss: 14.445237159729004\n",
      "Step 16: local_loss: 8.892019271850586\n",
      "Step 17: local_loss: 10.250506401062012\n",
      "Step 18: local_loss: 12.428077697753906\n",
      "Step 19: local_loss: 11.316259384155273\n",
      "Step 20: local_loss: 9.715662002563477\n",
      "Step 21: local_loss: 10.143455505371094\n",
      "Step 22: local_loss: 8.265140533447266\n",
      "Step 23: local_loss: 9.843137741088867\n",
      "Step 24: local_loss: 8.921689987182617\n",
      "Step 25: local_loss: 11.305654525756836\n",
      "Step 26: local_loss: 11.360965728759766\n",
      "Step 27: local_loss: 11.056164741516113\n",
      "Step 28: local_loss: 8.971920013427734\n",
      "Step 29: local_loss: 10.835492134094238\n",
      "Step 30: local_loss: 8.015894889831543\n",
      "Step 31: local_loss: 12.968671798706055\n",
      "Step 32: local_loss: 10.534907341003418\n",
      "Step 33: local_loss: 8.530695915222168\n",
      "Step 34: local_loss: 9.284394264221191\n",
      "Step 35: local_loss: 13.490056037902832\n",
      "Step 36: local_loss: 8.978109359741211\n",
      "Step 37: local_loss: 10.774430274963379\n",
      "Step 38: local_loss: 9.164217948913574\n",
      "Step 39: local_loss: 9.368037223815918\n",
      "Step 40: local_loss: 9.935040473937988\n",
      "Step 41: local_loss: 9.87081241607666\n",
      "Step 42: local_loss: 10.204833030700684\n",
      "Step 43: local_loss: 9.960365295410156\n",
      "Step 44: local_loss: 8.45146656036377\n",
      "Step 45: local_loss: 9.074488639831543\n",
      "Step 46: local_loss: 11.639046669006348\n",
      "Step 47: local_loss: 8.345898628234863\n",
      "Step 48: local_loss: 8.468833923339844\n",
      "Step 49: local_loss: 10.431265830993652\n",
      "Step 50: local_loss: 11.080415725708008\n",
      "Step 51: local_loss: 9.629631042480469\n",
      "Step 52: local_loss: 7.816812515258789\n",
      "Step 53: local_loss: 11.249740600585938\n",
      "Step 54: local_loss: 7.907647132873535\n",
      "Step 55: local_loss: 9.473554611206055\n",
      "Step 56: local_loss: 10.085837364196777\n",
      "Step 57: local_loss: 7.650256633758545\n",
      "Step 58: local_loss: 8.535359382629395\n",
      "Step 59: local_loss: 8.980719566345215\n",
      "Step 60: local_loss: 8.285270690917969\n",
      "Step 61: local_loss: 9.67074203491211\n",
      "Step 62: local_loss: 10.042830467224121\n",
      "Step 63: local_loss: 11.136472702026367\n",
      "Step 64: local_loss: 10.969643592834473\n",
      "Step 65: local_loss: 10.455673217773438\n",
      "Step 66: local_loss: 12.741580963134766\n",
      "Step 67: local_loss: 7.802871227264404\n",
      "Step 68: local_loss: 10.231802940368652\n",
      "Step 69: local_loss: 9.21545696258545\n",
      "Step 70: local_loss: 8.241673469543457\n",
      "Step 71: local_loss: 11.000312805175781\n",
      "Step 72: local_loss: 9.526785850524902\n",
      "Step 73: local_loss: 10.122308731079102\n",
      "Step 74: local_loss: 11.442459106445312\n",
      "Step 75: local_loss: 7.335236072540283\n",
      "Step 76: local_loss: 8.32940673828125\n",
      "Step 77: local_loss: 9.838558197021484\n",
      "Step 78: local_loss: 9.324378967285156\n",
      "Step 79: local_loss: 8.732778549194336\n",
      "Step 80: local_loss: 11.855484008789062\n",
      "Step 81: local_loss: 9.731893539428711\n",
      "Step 82: local_loss: 11.163602828979492\n",
      "Step 83: local_loss: 8.501532554626465\n",
      "Step 84: local_loss: 9.049886703491211\n",
      "Step 85: local_loss: 8.995706558227539\n",
      "Step 86: local_loss: 8.908926010131836\n",
      "Step 87: local_loss: 11.694693565368652\n",
      "Step 88: local_loss: 10.58922004699707\n",
      "Step 89: local_loss: 8.231334686279297\n",
      "Step 90: local_loss: 11.19454288482666\n",
      "Step 91: local_loss: 9.21061897277832\n",
      "Step 92: local_loss: 9.097829818725586\n",
      "Step 93: local_loss: 6.891042232513428\n",
      "Step 94: local_loss: 8.51889705657959\n",
      "Step 95: local_loss: 10.956061363220215\n",
      "Step 96: local_loss: 8.602547645568848\n",
      "Step 97: local_loss: 10.652909278869629\n",
      "Step 98: local_loss: 9.023526191711426\n",
      "Step 99: local_loss: 9.145079612731934\n",
      "Step 100: local_loss: 10.858853340148926\n",
      "Step 101: local_loss: 11.109707832336426\n",
      "Step 102: local_loss: 8.520526885986328\n",
      "Step 103: local_loss: 7.331157684326172\n",
      "Step 104: local_loss: 12.88192367553711\n",
      "Step 105: local_loss: 9.864538192749023\n",
      "Step 106: local_loss: 11.37113094329834\n",
      "Step 107: local_loss: 9.46229076385498\n",
      "Step 108: local_loss: 7.909603118896484\n",
      "Step 109: local_loss: 8.409961700439453\n",
      "Step 110: local_loss: 8.81022834777832\n",
      "Step 111: local_loss: 10.722012519836426\n",
      "Step 112: local_loss: 9.728097915649414\n",
      "Step 113: local_loss: 8.919229507446289\n",
      "Step 114: local_loss: 9.155499458312988\n",
      "Step 115: local_loss: 9.413520812988281\n",
      "Step 116: local_loss: 10.100564956665039\n",
      "Step 117: local_loss: 7.452524662017822\n",
      "Step 118: local_loss: 10.127218246459961\n",
      "Step 119: local_loss: 10.434234619140625\n",
      "Step 120: local_loss: 10.297865867614746\n",
      "Step 121: local_loss: 8.51522445678711\n",
      "Step 122: local_loss: 9.404325485229492\n",
      "Step 123: local_loss: 8.482248306274414\n",
      "Step 124: local_loss: 8.16829776763916\n",
      "Step 125: local_loss: 9.806652069091797\n",
      "Step 126: local_loss: 8.458819389343262\n",
      "Step 127: local_loss: 9.814748764038086\n",
      "Step 128: local_loss: 8.26126480102539\n",
      "Step 129: local_loss: 6.9380364418029785\n",
      "Step 130: local_loss: 10.051398277282715\n",
      "Step 131: local_loss: 9.328510284423828\n",
      "Step 132: local_loss: 8.575751304626465\n",
      "Step 133: local_loss: 8.307372093200684\n",
      "Step 134: local_loss: 9.078493118286133\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 135: local_loss: 6.411531925201416\n",
      "Step 136: local_loss: 8.424795150756836\n",
      "Step 137: local_loss: 7.432475566864014\n",
      "Step 138: local_loss: 9.190906524658203\n",
      "Step 139: local_loss: 8.502873420715332\n",
      "Step 140: local_loss: 8.094745635986328\n",
      "Step 141: local_loss: 6.650269508361816\n",
      "Step 142: local_loss: 8.400318145751953\n",
      "Step 143: local_loss: 8.673213005065918\n",
      "Step 144: local_loss: 8.10556411743164\n",
      "Step 145: local_loss: 7.282601356506348\n",
      "Step 146: local_loss: 5.886459827423096\n",
      "Step 147: local_loss: 6.573275089263916\n",
      "Step 148: local_loss: 7.857115268707275\n",
      "Step 149: local_loss: 5.866203308105469\n",
      "Step 150: local_loss: 7.851808071136475\n",
      "Step 151: local_loss: 6.023086071014404\n",
      "Step 152: local_loss: 9.846853256225586\n",
      "Step 153: local_loss: 7.604347229003906\n",
      "Step 154: local_loss: 7.744476795196533\n",
      "Step 155: local_loss: 5.9013190269470215\n",
      "Step 156: local_loss: 7.7472381591796875\n",
      "Step 157: local_loss: 7.279572486877441\n",
      "Step 158: local_loss: 8.00584888458252\n",
      "Step 159: local_loss: 7.175803184509277\n",
      "Step 160: local_loss: 7.304490089416504\n",
      "Step 161: local_loss: 7.502886772155762\n",
      "Step 162: local_loss: 8.092781066894531\n",
      "Step 163: local_loss: 9.564618110656738\n",
      "Step 164: local_loss: 7.1832756996154785\n",
      "Step 165: local_loss: 7.345681190490723\n",
      "Step 166: local_loss: 7.861001968383789\n",
      "Step 167: local_loss: 7.437257289886475\n",
      "Step 168: local_loss: 7.092348098754883\n",
      "Step 169: local_loss: 7.914054870605469\n",
      "Step 170: local_loss: 8.494120597839355\n",
      "Step 171: local_loss: 8.157797813415527\n",
      "Step 172: local_loss: 7.169994354248047\n",
      "Step 173: local_loss: 8.088069915771484\n",
      "Step 174: local_loss: 8.24664306640625\n",
      "Step 175: local_loss: 8.212132453918457\n",
      "Step 176: local_loss: 8.835345268249512\n",
      "Step 177: local_loss: 8.329524993896484\n",
      "Step 178: local_loss: 7.458529949188232\n",
      "Step 179: local_loss: 8.35143756866455\n",
      "Step 180: local_loss: 8.736720085144043\n",
      "Step 181: local_loss: 6.67512845993042\n",
      "Step 182: local_loss: 9.5036039352417\n",
      "Step 183: local_loss: 8.70897102355957\n",
      "Step 184: local_loss: 8.455859184265137\n",
      "Step 185: local_loss: 8.572237968444824\n",
      "Step 186: local_loss: 6.358479022979736\n",
      "Step 187: local_loss: 7.118957996368408\n",
      "Step 188: local_loss: 7.617453098297119\n",
      "Step 189: local_loss: 9.66484546661377\n",
      "Step 190: local_loss: 7.078917503356934\n",
      "Step 191: local_loss: 9.410309791564941\n",
      "Step 192: local_loss: 6.829375743865967\n",
      "Step 193: local_loss: 6.553097248077393\n",
      "Step 194: local_loss: 7.349827289581299\n",
      "Step 195: local_loss: 10.060266494750977\n",
      "Step 196: local_loss: 7.101010799407959\n",
      "Step 197: local_loss: 7.88242244720459\n",
      "Step 198: local_loss: 6.989078044891357\n",
      "Step 199: local_loss: 9.345722198486328\n",
      "Step 200: local_loss: 10.19726276397705\n",
      "Step 201: local_loss: 7.7721171379089355\n",
      "Step 202: local_loss: 6.301661491394043\n",
      "Step 203: local_loss: 6.0282063484191895\n",
      "Step 204: local_loss: 9.410529136657715\n",
      "Step 205: local_loss: 7.643083095550537\n",
      "Step 206: local_loss: 5.038858890533447\n",
      "Step 207: local_loss: 6.808340072631836\n",
      "Step 208: local_loss: 5.764889717102051\n",
      "Step 209: local_loss: 6.775514125823975\n",
      "Step 210: local_loss: 8.470745086669922\n",
      "Step 211: local_loss: 6.542820453643799\n",
      "Step 212: local_loss: 9.510968208312988\n",
      "Step 213: local_loss: 6.687568664550781\n",
      "Step 214: local_loss: 5.735157489776611\n",
      "Step 215: local_loss: 4.714597702026367\n",
      "Step 216: local_loss: 7.893708229064941\n",
      "Step 217: local_loss: 6.612727165222168\n",
      "Step 218: local_loss: 5.6763916015625\n",
      "Step 219: local_loss: 5.3586249351501465\n",
      "Step 220: local_loss: 5.145279407501221\n",
      "Step 221: local_loss: 10.280019760131836\n",
      "Step 222: local_loss: 5.876715183258057\n",
      "Step 223: local_loss: 6.052154541015625\n",
      "Step 224: local_loss: 5.121203899383545\n",
      "Step 225: local_loss: 5.0395588874816895\n",
      "Step 226: local_loss: 6.532607078552246\n",
      "Step 227: local_loss: 6.258718967437744\n",
      "Step 228: local_loss: 8.007551193237305\n",
      "Step 229: local_loss: 7.69253396987915\n",
      "Step 230: local_loss: 9.308234214782715\n",
      "Step 231: local_loss: 8.19840145111084\n",
      "Step 232: local_loss: 6.901337623596191\n",
      "Step 233: local_loss: 8.184343338012695\n",
      "Step 234: local_loss: 4.753467559814453\n",
      "Step 235: local_loss: 6.192979335784912\n",
      "Step 236: local_loss: 5.677224159240723\n",
      "Step 237: local_loss: 5.971626281738281\n",
      "Step 238: local_loss: 7.437137603759766\n",
      "Step 239: local_loss: 7.003993988037109\n",
      "Step 240: local_loss: 7.129053592681885\n",
      "Step 241: local_loss: 4.048840045928955\n",
      "Step 242: local_loss: 4.707006931304932\n",
      "Step 243: local_loss: 5.113946437835693\n",
      "Step 244: local_loss: 4.080394744873047\n",
      "Step 245: local_loss: 5.325669288635254\n",
      "Step 246: local_loss: 4.906176567077637\n",
      "Step 247: local_loss: 7.693035125732422\n",
      "Step 248: local_loss: 7.55199670791626\n",
      "Step 249: local_loss: 7.186685562133789\n",
      "Step 250: local_loss: 6.442567348480225\n",
      "Step 251: local_loss: 4.93147611618042\n",
      "Step 252: local_loss: 4.611440181732178\n",
      "Step 253: local_loss: 6.134866714477539\n",
      "Step 254: local_loss: 5.552664279937744\n",
      "Step 255: local_loss: 5.556879997253418\n",
      "Step 256: local_loss: 5.318424701690674\n",
      "Step 257: local_loss: 7.924487113952637\n",
      "Step 258: local_loss: 5.542001247406006\n",
      "Step 259: local_loss: 5.167453289031982\n",
      "Step 260: local_loss: 7.620208263397217\n",
      "Step 261: local_loss: 7.061763763427734\n",
      "Step 262: local_loss: 4.453296184539795\n",
      "Step 263: local_loss: 3.650580883026123\n",
      "Step 264: local_loss: 4.419011116027832\n",
      "Step 265: local_loss: 5.077675819396973\n",
      "Step 266: local_loss: 4.870521068572998\n",
      "Step 267: local_loss: 8.446763038635254\n",
      "Step 268: local_loss: 4.934953212738037\n",
      "Step 269: local_loss: 4.488858699798584\n",
      "Step 270: local_loss: 8.651040077209473\n",
      "Step 271: local_loss: 5.826630115509033\n",
      "Step 272: local_loss: 6.244754314422607\n",
      "Step 273: local_loss: 5.053499698638916\n",
      "Step 274: local_loss: 5.297159671783447\n",
      "Step 275: local_loss: 5.992886543273926\n",
      "Step 276: local_loss: 4.586610317230225\n",
      "Step 277: local_loss: 5.717217922210693\n",
      "Step 278: local_loss: 5.020170211791992\n",
      "Step 279: local_loss: 4.885286808013916\n",
      "Step 280: local_loss: 5.092382431030273\n",
      "Step 281: local_loss: 6.038612365722656\n",
      "Step 282: local_loss: 5.956273078918457\n",
      "Step 283: local_loss: 4.749202728271484\n",
      "Step 284: local_loss: 6.681482315063477\n",
      "Step 285: local_loss: 5.529362678527832\n",
      "Step 286: local_loss: 5.395409107208252\n",
      "Step 287: local_loss: 5.0353498458862305\n",
      "Step 288: local_loss: 6.169082164764404\n",
      "Step 289: local_loss: 4.007419109344482\n",
      "Step 290: local_loss: 6.041060924530029\n",
      "Step 291: local_loss: 7.4993391036987305\n",
      "Step 292: local_loss: 5.974026679992676\n",
      "Step 293: local_loss: 5.393162727355957\n",
      "Step 294: local_loss: 7.615346431732178\n",
      "Step 295: local_loss: 6.1151604652404785\n",
      "Step 296: local_loss: 4.560251235961914\n",
      "Step 297: local_loss: 5.784640789031982\n",
      "Step 298: local_loss: 4.045540809631348\n",
      "Step 299: local_loss: 6.610982418060303\n",
      "Center node 7312 finished at step 300 with loss 6.6110\n",
      "\n",
      "=== Pretraining Epoch 4 (Center Node: 6714) ===\n",
      "Step 0: local_loss: 5.804574012756348\n",
      "Step 1: local_loss: 6.921444892883301\n",
      "Step 2: local_loss: 6.764054775238037\n",
      "Step 3: local_loss: 7.673748016357422\n",
      "Step 4: local_loss: 6.65430212020874\n",
      "Step 5: local_loss: 4.987390518188477\n",
      "Step 6: local_loss: 4.520288944244385\n",
      "Step 7: local_loss: 5.6143903732299805\n",
      "Step 8: local_loss: 7.0414814949035645\n",
      "Step 9: local_loss: 5.982929706573486\n",
      "Step 10: local_loss: 7.409590244293213\n",
      "Step 11: local_loss: 9.48543643951416\n",
      "Step 12: local_loss: 5.913257598876953\n",
      "Step 13: local_loss: 5.172363758087158\n",
      "Step 14: local_loss: 6.291977405548096\n",
      "Step 15: local_loss: 6.585447311401367\n",
      "Step 16: local_loss: 8.053468704223633\n",
      "Step 17: local_loss: 6.340969085693359\n",
      "Step 23: local_loss: 6.027405261993408\n",
      "Step 24: local_loss: 6.6927490234375\n",
      "Step 25: local_loss: 6.821486949920654\n",
      "Step 26: local_loss: 5.991325855255127\n",
      "Step 27: local_loss: 5.807175636291504\n",
      "Step 28: local_loss: 6.8858747482299805\n",
      "Step 29: local_loss: 4.82083797454834\n",
      "Step 30: local_loss: 8.047725677490234\n",
      "Step 31: local_loss: 4.914590835571289\n",
      "Step 32: local_loss: 5.57634973526001\n",
      "Step 33: local_loss: 5.852670192718506\n",
      "Step 34: local_loss: 6.150288105010986\n",
      "Step 35: local_loss: 4.152758598327637\n",
      "Step 36: local_loss: 8.08010196685791\n",
      "Step 37: local_loss: 5.502688407897949\n",
      "Step 38: local_loss: 7.066719055175781\n",
      "Step 39: local_loss: 6.748445987701416\n",
      "Step 40: local_loss: 7.238736152648926\n",
      "Step 41: local_loss: 7.855154991149902\n",
      "Step 42: local_loss: 5.753548622131348\n",
      "Step 43: local_loss: 7.974132537841797\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 44: local_loss: 8.041972160339355\n",
      "Step 45: local_loss: 7.49078369140625\n",
      "Step 46: local_loss: 8.412149429321289\n",
      "Step 47: local_loss: 7.521182537078857\n",
      "Step 48: local_loss: 8.7659330368042\n",
      "Step 49: local_loss: 5.822818279266357\n",
      "Step 50: local_loss: 7.127328395843506\n",
      "Step 51: local_loss: 5.5557355880737305\n",
      "Step 52: local_loss: 5.460530757904053\n",
      "Step 53: local_loss: 7.694516181945801\n",
      "Step 54: local_loss: 4.379763603210449\n",
      "Step 55: local_loss: 7.933171272277832\n",
      "Step 56: local_loss: 7.341071605682373\n",
      "Step 57: local_loss: 6.354567050933838\n",
      "Step 58: local_loss: 6.303774356842041\n",
      "Step 59: local_loss: 5.370952129364014\n",
      "Step 60: local_loss: 5.780043601989746\n",
      "Step 61: local_loss: 5.842117786407471\n",
      "Step 62: local_loss: 4.74948787689209\n",
      "Step 63: local_loss: 4.923863887786865\n",
      "Step 64: local_loss: 4.287199020385742\n",
      "Step 65: local_loss: 10.143502235412598\n",
      "Step 66: local_loss: 4.982855319976807\n",
      "Step 67: local_loss: 3.0892629623413086\n",
      "Step 68: local_loss: 3.737217903137207\n",
      "Step 69: local_loss: 6.185999393463135\n",
      "Step 70: local_loss: 7.1733717918396\n",
      "Step 71: local_loss: 6.43094539642334\n",
      "Step 72: local_loss: 6.8502516746521\n",
      "Step 73: local_loss: 6.60724401473999\n",
      "Step 74: local_loss: 5.904503345489502\n",
      "Step 75: local_loss: 4.687033653259277\n",
      "Step 76: local_loss: 9.809440612792969\n",
      "Step 77: local_loss: 5.3469648361206055\n",
      "Step 78: local_loss: 3.89595365524292\n",
      "Step 79: local_loss: 5.436974048614502\n",
      "Step 80: local_loss: 4.719440460205078\n",
      "Step 81: local_loss: 5.263565540313721\n",
      "Step 82: local_loss: 6.530694961547852\n",
      "Step 83: local_loss: 5.633047103881836\n",
      "Step 84: local_loss: 5.96931266784668\n",
      "Step 85: local_loss: 4.534581661224365\n",
      "Step 86: local_loss: 6.5587568283081055\n",
      "Step 87: local_loss: 6.454823017120361\n",
      "Step 88: local_loss: 5.4423322677612305\n",
      "Step 89: local_loss: 6.409080982208252\n",
      "Step 90: local_loss: 8.248196601867676\n",
      "Step 91: local_loss: 5.332934856414795\n",
      "Step 92: local_loss: 5.182344913482666\n",
      "Step 93: local_loss: 3.7434701919555664\n",
      "Step 94: local_loss: 4.859302997589111\n",
      "Step 95: local_loss: 6.159764289855957\n",
      "Step 96: local_loss: 5.119609832763672\n",
      "Step 97: local_loss: 6.693985462188721\n",
      "Step 98: local_loss: 7.299277305603027\n",
      "Step 99: local_loss: 3.049510955810547\n",
      "Step 100: local_loss: 5.40195894241333\n",
      "Step 101: local_loss: 4.231245517730713\n",
      "Step 102: local_loss: 3.6925177574157715\n",
      "Step 103: local_loss: 5.659799575805664\n",
      "Step 104: local_loss: 5.3112592697143555\n",
      "Step 105: local_loss: 6.50382661819458\n",
      "Step 106: local_loss: 4.258392810821533\n",
      "Step 107: local_loss: 7.107748985290527\n",
      "Step 108: local_loss: 5.280604362487793\n",
      "Step 109: local_loss: 4.9429144859313965\n",
      "Step 110: local_loss: 5.536220550537109\n",
      "Step 111: local_loss: 5.738123893737793\n",
      "Step 112: local_loss: 5.307835102081299\n",
      "Step 113: local_loss: 4.685812950134277\n",
      "Step 114: local_loss: 4.993832588195801\n",
      "Step 115: local_loss: 4.567190647125244\n",
      "Step 116: local_loss: 5.616665840148926\n",
      "Step 117: local_loss: 4.06299352645874\n",
      "Step 118: local_loss: 5.445623874664307\n",
      "Step 119: local_loss: 5.071208953857422\n",
      "Step 120: local_loss: 3.4293720722198486\n",
      "Step 121: local_loss: 2.3117830753326416\n",
      "Step 122: local_loss: 12.28270149230957\n",
      "Step 123: local_loss: 4.432875633239746\n",
      "Step 124: local_loss: 10.3960599899292\n",
      "Step 125: local_loss: 4.51525354385376\n",
      "Step 126: local_loss: 6.684893608093262\n",
      "Step 127: local_loss: 4.433661460876465\n",
      "Step 128: local_loss: 7.05519437789917\n",
      "Step 129: local_loss: 6.134354114532471\n",
      "Step 130: local_loss: 4.8418073654174805\n",
      "Step 131: local_loss: 8.695064544677734\n",
      "Step 132: local_loss: 5.810314178466797\n",
      "Step 133: local_loss: 4.717763423919678\n",
      "Step 134: local_loss: 3.6015326976776123\n",
      "Step 135: local_loss: 5.608914375305176\n",
      "Step 136: local_loss: 4.808112144470215\n",
      "Step 137: local_loss: 6.986206531524658\n",
      "Step 138: local_loss: 5.318683624267578\n",
      "Step 139: local_loss: 4.81343936920166\n",
      "Step 140: local_loss: 4.739102840423584\n",
      "Step 141: local_loss: 4.538288116455078\n",
      "Step 142: local_loss: 4.69369649887085\n",
      "Step 143: local_loss: 4.957592010498047\n",
      "Step 144: local_loss: 6.416777610778809\n",
      "Step 145: local_loss: 4.684482097625732\n",
      "Step 146: local_loss: 4.4387006759643555\n",
      "Step 147: local_loss: 6.326013565063477\n",
      "Step 148: local_loss: 4.202804088592529\n",
      "Step 149: local_loss: 4.895170211791992\n",
      "Step 150: local_loss: 4.99121618270874\n",
      "Step 151: local_loss: 3.742866277694702\n",
      "Step 152: local_loss: 4.916613578796387\n",
      "Step 153: local_loss: 5.318423271179199\n",
      "Step 154: local_loss: 4.573030471801758\n",
      "Step 155: local_loss: 7.257059097290039\n",
      "Step 156: local_loss: 6.517911911010742\n",
      "Step 157: local_loss: 9.807767868041992\n",
      "Step 158: local_loss: 4.9190592765808105\n",
      "Step 159: local_loss: 6.10446310043335\n",
      "Step 160: local_loss: 2.864137887954712\n",
      "Step 161: local_loss: 4.509974479675293\n",
      "Step 162: local_loss: 4.543155670166016\n",
      "Step 163: local_loss: 4.729124069213867\n",
      "Step 164: local_loss: 5.932442665100098\n",
      "Step 165: local_loss: 7.1832146644592285\n",
      "Step 166: local_loss: 7.160810947418213\n",
      "Step 167: local_loss: 8.065364837646484\n",
      "Step 168: local_loss: 4.049703121185303\n",
      "Step 169: local_loss: 5.310251235961914\n",
      "Step 170: local_loss: 6.426052570343018\n",
      "Step 171: local_loss: 9.013148307800293\n",
      "Step 172: local_loss: 5.117416858673096\n",
      "Step 173: local_loss: 5.774572849273682\n",
      "Step 174: local_loss: 5.249196529388428\n",
      "Step 175: local_loss: 5.833165645599365\n",
      "Step 176: local_loss: 6.726008415222168\n",
      "Step 177: local_loss: 4.180492877960205\n",
      "Step 178: local_loss: 4.81729793548584\n",
      "Step 179: local_loss: 6.171188831329346\n",
      "Step 180: local_loss: 3.043440818786621\n",
      "Step 181: local_loss: 6.105015277862549\n",
      "Step 182: local_loss: 3.5206172466278076\n",
      "Step 183: local_loss: 5.330124378204346\n",
      "Step 184: local_loss: 6.62900972366333\n",
      "Step 185: local_loss: 4.998734951019287\n",
      "Step 186: local_loss: 2.9611761569976807\n",
      "Step 187: local_loss: 7.456376552581787\n",
      "Step 188: local_loss: 4.283705234527588\n",
      "Step 189: local_loss: 2.9388887882232666\n",
      "Step 190: local_loss: 4.306880950927734\n",
      "Step 191: local_loss: 5.711085319519043\n",
      "Step 192: local_loss: 4.321744918823242\n",
      "Step 193: local_loss: 4.568681716918945\n",
      "Step 194: local_loss: 4.980405807495117\n",
      "Step 195: local_loss: 6.466374397277832\n",
      "Step 196: local_loss: 5.166303634643555\n",
      "Step 197: local_loss: 6.064459323883057\n",
      "Step 198: local_loss: 6.084794998168945\n",
      "Step 199: local_loss: 4.699702262878418\n",
      "Step 200: local_loss: 6.109298229217529\n",
      "Step 201: local_loss: 5.639802932739258\n",
      "Step 202: local_loss: 4.173315525054932\n",
      "Step 203: local_loss: 4.235873699188232\n",
      "Step 204: local_loss: 3.188763380050659\n",
      "Step 205: local_loss: 3.742979049682617\n",
      "Step 206: local_loss: 3.605522394180298\n",
      "Step 207: local_loss: 5.688343048095703\n",
      "Step 208: local_loss: 3.6027026176452637\n",
      "Step 209: local_loss: 4.432284832000732\n",
      "Step 210: local_loss: 4.200251579284668\n",
      "Step 216: local_loss: 5.103869915008545\n",
      "Step 217: local_loss: 4.428987979888916\n",
      "Step 218: local_loss: 3.6374762058258057\n",
      "Step 219: local_loss: 4.161757469177246\n",
      "Step 220: local_loss: 4.661050319671631\n",
      "Step 221: local_loss: 6.149196624755859\n",
      "Step 222: local_loss: 5.296061038970947\n",
      "Step 223: local_loss: 4.4701151847839355\n",
      "Step 224: local_loss: 4.226876735687256\n",
      "Step 225: local_loss: 5.839364051818848\n",
      "Step 226: local_loss: 5.562856197357178\n",
      "Step 227: local_loss: 3.3559439182281494\n",
      "Step 228: local_loss: 3.9750077724456787\n",
      "Step 229: local_loss: 6.799501895904541\n",
      "Step 230: local_loss: 4.746435165405273\n",
      "Step 231: local_loss: 4.315608501434326\n",
      "Step 232: local_loss: 4.6503825187683105\n",
      "Step 233: local_loss: 4.508523464202881\n",
      "Step 234: local_loss: 4.334561347961426\n",
      "Step 235: local_loss: 2.8531112670898438\n",
      "Step 236: local_loss: 5.238883972167969\n",
      "Step 237: local_loss: 3.642817974090576\n",
      "Step 238: local_loss: 3.716470956802368\n",
      "Step 239: local_loss: 5.104019641876221\n",
      "Step 240: local_loss: 7.371776103973389\n",
      "Step 241: local_loss: 4.395405292510986\n",
      "Step 242: local_loss: 3.8272604942321777\n",
      "Step 243: local_loss: 5.5374627113342285\n",
      "Step 244: local_loss: 3.2804577350616455\n",
      "Step 245: local_loss: 5.9773149490356445\n",
      "Step 246: local_loss: 4.601010322570801\n",
      "Step 247: local_loss: 2.8406577110290527\n",
      "Step 248: local_loss: 6.992969036102295\n",
      "Step 249: local_loss: 4.937589645385742\n",
      "Step 250: local_loss: 2.6272172927856445\n",
      "Step 251: local_loss: 3.2808279991149902\n",
      "Step 252: local_loss: 5.002739906311035\n",
      "Step 253: local_loss: 4.866592884063721\n",
      "Step 254: local_loss: 4.235880374908447\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 255: local_loss: 4.197722434997559\n",
      "Step 256: local_loss: 3.9610958099365234\n",
      "Step 257: local_loss: 3.5976507663726807\n",
      "Step 258: local_loss: 5.028790473937988\n",
      "Step 259: local_loss: 2.540661334991455\n",
      "Step 260: local_loss: 3.9094762802124023\n",
      "Step 261: local_loss: 4.520021438598633\n",
      "Step 262: local_loss: 3.899649143218994\n",
      "Step 263: local_loss: 4.758378028869629\n",
      "Step 264: local_loss: 3.7505815029144287\n",
      "Step 265: local_loss: 2.854337215423584\n",
      "Step 266: local_loss: 3.9798309803009033\n",
      "Step 267: local_loss: 2.9486594200134277\n",
      "Step 268: local_loss: 4.0129265785217285\n",
      "Step 269: local_loss: 3.526564359664917\n",
      "Step 270: local_loss: 5.876113414764404\n",
      "Step 271: local_loss: 4.594807147979736\n",
      "Step 272: local_loss: 3.8814949989318848\n",
      "Step 273: local_loss: 3.153881788253784\n",
      "Step 274: local_loss: 4.210465908050537\n",
      "Step 275: local_loss: 4.5174880027771\n",
      "Step 276: local_loss: 3.6510469913482666\n",
      "Step 277: local_loss: 3.933298349380493\n",
      "Step 278: local_loss: 2.0378258228302\n",
      "Step 279: local_loss: 5.610463619232178\n",
      "Step 280: local_loss: 5.897229194641113\n",
      "Step 281: local_loss: 5.211650848388672\n",
      "Step 282: local_loss: 5.128570079803467\n",
      "Step 283: local_loss: 4.839070796966553\n",
      "Step 284: local_loss: 4.360771179199219\n",
      "Step 285: local_loss: 4.2607927322387695\n",
      "Step 286: local_loss: 7.864747047424316\n",
      "Step 287: local_loss: 5.608273506164551\n",
      "Step 288: local_loss: 4.874578475952148\n",
      "Step 289: local_loss: 5.546301364898682\n",
      "Step 290: local_loss: 4.174020290374756\n",
      "Step 291: local_loss: 5.628860950469971\n",
      "Step 292: local_loss: 4.505099773406982\n",
      "Step 293: local_loss: 9.81599235534668\n",
      "Step 294: local_loss: 4.5355634689331055\n",
      "Step 295: local_loss: 3.5923426151275635\n",
      "Step 296: local_loss: 4.376451015472412\n",
      "Step 297: local_loss: 3.186048984527588\n",
      "Step 298: local_loss: 4.528970241546631\n",
      "Step 299: local_loss: 2.9224324226379395\n",
      "Center node 6714 finished at step 300 with loss 2.9224\n",
      "\n",
      "=== Pretraining Epoch 5 (Center Node: 7617) ===\n",
      "Step 0: local_loss: 10.715470314025879\n",
      "Step 1: local_loss: 12.69177532196045\n",
      "Step 2: local_loss: 11.52444076538086\n",
      "Step 3: local_loss: 13.029638290405273\n",
      "Step 4: local_loss: 9.05798625946045\n",
      "Step 5: local_loss: 13.65566349029541\n",
      "Step 6: local_loss: 11.08741569519043\n",
      "Step 7: local_loss: 13.382416725158691\n",
      "Step 8: local_loss: 13.714232444763184\n",
      "Step 9: local_loss: 10.336217880249023\n",
      "Step 10: local_loss: 11.097709655761719\n",
      "Step 11: local_loss: 10.351300239562988\n",
      "Step 12: local_loss: 13.50150203704834\n",
      "Step 13: local_loss: 9.113584518432617\n",
      "Step 14: local_loss: 12.193171501159668\n",
      "Step 15: local_loss: 15.047365188598633\n",
      "Step 16: local_loss: 10.20760726928711\n",
      "Step 17: local_loss: 11.090275764465332\n",
      "Step 18: local_loss: 7.714231967926025\n",
      "Step 19: local_loss: 7.963132858276367\n",
      "Step 20: local_loss: 12.966221809387207\n",
      "Step 21: local_loss: 14.1012544631958\n",
      "Step 22: local_loss: 11.437962532043457\n",
      "Step 23: local_loss: 12.293852806091309\n",
      "Step 24: local_loss: 13.940719604492188\n",
      "Step 25: local_loss: 8.582615852355957\n",
      "Step 26: local_loss: 10.247758865356445\n",
      "Step 27: local_loss: 9.820958137512207\n",
      "Step 28: local_loss: 8.962191581726074\n",
      "Step 29: local_loss: 15.539055824279785\n",
      "Step 30: local_loss: 9.203071594238281\n",
      "Step 31: local_loss: 9.582430839538574\n",
      "Step 32: local_loss: 9.608378410339355\n",
      "Step 33: local_loss: 14.111916542053223\n",
      "Step 34: local_loss: 11.584091186523438\n",
      "Step 35: local_loss: 13.081108093261719\n",
      "Step 36: local_loss: 17.329160690307617\n",
      "Step 37: local_loss: 5.012932300567627\n",
      "Step 38: local_loss: 10.576986312866211\n",
      "Step 39: local_loss: 10.157204627990723\n",
      "Step 40: local_loss: 10.665794372558594\n",
      "Step 41: local_loss: 13.030355453491211\n",
      "Step 42: local_loss: 13.82096004486084\n",
      "Step 43: local_loss: 11.255208969116211\n",
      "Step 44: local_loss: 12.379871368408203\n",
      "Step 45: local_loss: 9.508254051208496\n",
      "Step 46: local_loss: 11.505727767944336\n",
      "Step 47: local_loss: 8.686590194702148\n",
      "Step 48: local_loss: 13.12464427947998\n",
      "Step 49: local_loss: 9.51919174194336\n",
      "Step 50: local_loss: 11.994994163513184\n",
      "Step 51: local_loss: 12.01906967163086\n",
      "Step 52: local_loss: 11.29399585723877\n",
      "Step 53: local_loss: 9.39604663848877\n",
      "Step 54: local_loss: 12.23889446258545\n",
      "Step 55: local_loss: 13.647257804870605\n",
      "Step 56: local_loss: 10.234546661376953\n",
      "Step 57: local_loss: 11.842379570007324\n",
      "Step 58: local_loss: 8.69151496887207\n",
      "Step 59: local_loss: 9.752819061279297\n",
      "Step 60: local_loss: 8.659756660461426\n",
      "Step 61: local_loss: 9.753885269165039\n",
      "Step 62: local_loss: 6.021213054656982\n",
      "Step 63: local_loss: 10.98576545715332\n",
      "Step 64: local_loss: 12.050776481628418\n",
      "Step 65: local_loss: 11.962807655334473\n",
      "Step 66: local_loss: 9.76590347290039\n",
      "Step 67: local_loss: 10.784974098205566\n",
      "Step 68: local_loss: 10.34896183013916\n",
      "Step 69: local_loss: 7.683842182159424\n",
      "Step 70: local_loss: 12.357046127319336\n",
      "Step 71: local_loss: 12.639201164245605\n",
      "Step 72: local_loss: 13.43103313446045\n",
      "Step 73: local_loss: 14.4083833694458\n",
      "Step 74: local_loss: 6.626548767089844\n",
      "Step 75: local_loss: 7.257214069366455\n",
      "Step 76: local_loss: 10.121112823486328\n",
      "Step 77: local_loss: 10.389870643615723\n",
      "Step 78: local_loss: 10.845279693603516\n",
      "Step 79: local_loss: 12.142952919006348\n",
      "Step 80: local_loss: 11.522709846496582\n",
      "Step 81: local_loss: 12.211520195007324\n",
      "Step 82: local_loss: 10.491345405578613\n",
      "Step 83: local_loss: 11.699563980102539\n",
      "Step 84: local_loss: 8.945746421813965\n",
      "Step 85: local_loss: 8.175382614135742\n",
      "Step 86: local_loss: 11.594916343688965\n",
      "Step 87: local_loss: 7.736036777496338\n",
      "Step 88: local_loss: 11.585280418395996\n",
      "Step 89: local_loss: 11.545755386352539\n",
      "Step 90: local_loss: 11.543968200683594\n",
      "Step 91: local_loss: 13.47704029083252\n",
      "Step 92: local_loss: 9.42432689666748\n",
      "Step 93: local_loss: 11.45277214050293\n",
      "Step 94: local_loss: 11.496000289916992\n",
      "Step 95: local_loss: 10.45638656616211\n",
      "Step 96: local_loss: 13.022744178771973\n",
      "Step 97: local_loss: 10.497841835021973\n",
      "Step 98: local_loss: 9.042445182800293\n",
      "Step 99: local_loss: 10.474312782287598\n",
      "Step 100: local_loss: 9.893056869506836\n",
      "Step 101: local_loss: 12.613378524780273\n",
      "Step 102: local_loss: 9.004483222961426\n",
      "Step 103: local_loss: 12.472332954406738\n",
      "Step 104: local_loss: 8.841339111328125\n",
      "Step 105: local_loss: 10.618237495422363\n",
      "Step 106: local_loss: 10.366357803344727\n",
      "Step 107: local_loss: 12.932458877563477\n",
      "Step 108: local_loss: 8.868417739868164\n",
      "Step 109: local_loss: 11.685420989990234\n",
      "Step 110: local_loss: 10.028450965881348\n",
      "Step 111: local_loss: 12.557814598083496\n",
      "Step 112: local_loss: 9.957280158996582\n",
      "Step 113: local_loss: 5.65864896774292\n",
      "Step 114: local_loss: 10.41537857055664\n",
      "Step 115: local_loss: 10.622213363647461\n",
      "Step 116: local_loss: 9.509106636047363\n",
      "Step 117: local_loss: 12.086466789245605\n",
      "Step 118: local_loss: 11.55412769317627\n",
      "Step 119: local_loss: 8.2106294631958\n",
      "Step 120: local_loss: 8.228873252868652\n",
      "Step 121: local_loss: 10.080221176147461\n",
      "Step 122: local_loss: 8.178919792175293\n",
      "Step 123: local_loss: 10.181600570678711\n",
      "Step 124: local_loss: 7.54862642288208\n",
      "Step 125: local_loss: 12.176218032836914\n",
      "Step 126: local_loss: 9.269667625427246\n",
      "Step 127: local_loss: 12.874884605407715\n",
      "Step 128: local_loss: 8.907509803771973\n",
      "Step 129: local_loss: 7.327699661254883\n",
      "Step 130: local_loss: 8.599602699279785\n",
      "Step 131: local_loss: 8.696858406066895\n",
      "Step 132: local_loss: 7.5309648513793945\n",
      "Step 133: local_loss: 9.622692108154297\n",
      "Step 134: local_loss: 11.730963706970215\n",
      "Step 135: local_loss: 11.540061950683594\n",
      "Step 136: local_loss: 11.2444486618042\n",
      "Step 137: local_loss: 8.522281646728516\n",
      "Step 138: local_loss: 9.569790840148926\n",
      "Step 139: local_loss: 7.8077802658081055\n",
      "Step 140: local_loss: 8.275394439697266\n",
      "Step 141: local_loss: 7.132388591766357\n",
      "Step 142: local_loss: 9.241768836975098\n",
      "Step 143: local_loss: 7.860295295715332\n",
      "Step 144: local_loss: 8.551824569702148\n",
      "Step 145: local_loss: 9.333895683288574\n",
      "Step 146: local_loss: 9.372359275817871\n",
      "Step 147: local_loss: 5.889909267425537\n",
      "Step 148: local_loss: 6.175819396972656\n",
      "Step 149: local_loss: 8.404621124267578\n",
      "Step 150: local_loss: 6.19939661026001\n",
      "Step 151: local_loss: 9.406118392944336\n",
      "Step 152: local_loss: 9.854150772094727\n",
      "Step 153: local_loss: 9.76587200164795\n",
      "Step 154: local_loss: 9.984713554382324\n",
      "Step 155: local_loss: 8.440625190734863\n",
      "Step 156: local_loss: 8.821044921875\n",
      "Step 157: local_loss: 10.11900806427002\n",
      "Step 158: local_loss: 8.7686767578125\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 159: local_loss: 7.818928241729736\n",
      "Step 160: local_loss: 6.279457092285156\n",
      "Step 161: local_loss: 8.225398063659668\n",
      "Step 162: local_loss: 8.64188289642334\n",
      "Step 163: local_loss: 9.324652671813965\n",
      "Step 164: local_loss: 8.028583526611328\n",
      "Step 165: local_loss: 10.873067855834961\n",
      "Step 166: local_loss: 7.940423011779785\n",
      "Step 167: local_loss: 7.876698017120361\n",
      "Step 168: local_loss: 8.302142143249512\n",
      "Step 169: local_loss: 11.743881225585938\n",
      "Step 170: local_loss: 8.656654357910156\n",
      "Step 171: local_loss: 14.032150268554688\n",
      "Step 172: local_loss: 11.119287490844727\n",
      "Step 173: local_loss: 8.041251182556152\n",
      "Step 174: local_loss: 9.373745918273926\n",
      "Step 175: local_loss: 8.93424129486084\n",
      "Step 176: local_loss: 8.390291213989258\n",
      "Step 177: local_loss: 6.664642810821533\n",
      "Step 178: local_loss: 7.417865753173828\n",
      "Step 179: local_loss: 8.8184232711792\n",
      "Step 180: local_loss: 9.489228248596191\n",
      "Step 181: local_loss: 6.425788879394531\n",
      "Step 182: local_loss: 11.960707664489746\n",
      "Step 183: local_loss: 8.621467590332031\n",
      "Step 184: local_loss: 10.608006477355957\n",
      "Step 185: local_loss: 9.156213760375977\n",
      "Step 186: local_loss: 10.904779434204102\n",
      "Step 187: local_loss: 8.574240684509277\n",
      "Step 188: local_loss: 7.239915370941162\n",
      "Step 189: local_loss: 6.089585781097412\n",
      "Step 190: local_loss: 6.682751655578613\n",
      "Step 191: local_loss: 9.121075630187988\n",
      "Step 192: local_loss: 7.395652770996094\n",
      "Step 193: local_loss: 8.534010887145996\n",
      "Step 194: local_loss: 8.733746528625488\n",
      "Step 195: local_loss: 7.653263568878174\n",
      "Step 196: local_loss: 9.16904354095459\n",
      "Step 197: local_loss: 12.601943969726562\n",
      "Step 198: local_loss: 10.576068878173828\n",
      "Step 199: local_loss: 8.491856575012207\n",
      "Step 200: local_loss: 9.998692512512207\n",
      "Step 201: local_loss: 7.92148494720459\n",
      "Step 202: local_loss: 5.475220203399658\n",
      "Step 203: local_loss: 7.1999592781066895\n",
      "Step 204: local_loss: 8.145648956298828\n",
      "Step 205: local_loss: 10.93458080291748\n",
      "Step 206: local_loss: 8.430228233337402\n",
      "Step 207: local_loss: 8.062636375427246\n",
      "Step 208: local_loss: 9.3749418258667\n",
      "Step 209: local_loss: 9.529767036437988\n",
      "Step 210: local_loss: 4.7771406173706055\n",
      "Step 211: local_loss: 7.906728744506836\n",
      "Step 212: local_loss: 7.689388751983643\n",
      "Step 213: local_loss: 9.040067672729492\n",
      "Step 214: local_loss: 7.847922325134277\n",
      "Step 215: local_loss: 10.591106414794922\n",
      "Step 216: local_loss: 8.158102989196777\n",
      "Step 217: local_loss: 9.39599895477295\n",
      "Step 218: local_loss: 9.852307319641113\n",
      "Step 219: local_loss: 11.972919464111328\n",
      "Step 220: local_loss: 9.280143737792969\n",
      "Step 221: local_loss: 7.936772346496582\n",
      "Step 222: local_loss: 8.381479263305664\n",
      "Step 223: local_loss: 10.020284652709961\n",
      "Step 224: local_loss: 8.382265090942383\n",
      "Step 225: local_loss: 6.617111682891846\n",
      "Step 226: local_loss: 11.167635917663574\n",
      "Step 227: local_loss: 5.500666618347168\n",
      "Step 228: local_loss: 6.442473411560059\n",
      "Step 229: local_loss: 8.868335723876953\n",
      "Step 230: local_loss: 8.633406639099121\n",
      "Step 231: local_loss: 8.576018333435059\n",
      "Step 232: local_loss: 7.640925884246826\n",
      "Step 233: local_loss: 6.945284843444824\n",
      "Step 234: local_loss: 6.9521403312683105\n",
      "Step 235: local_loss: 6.853227138519287\n",
      "Step 236: local_loss: 7.992405414581299\n",
      "Step 237: local_loss: 8.173355102539062\n",
      "Step 238: local_loss: 10.213005065917969\n",
      "Step 239: local_loss: 13.239287376403809\n",
      "Step 240: local_loss: 7.659273624420166\n",
      "Step 241: local_loss: 8.408790588378906\n",
      "Step 242: local_loss: 10.173471450805664\n",
      "Step 243: local_loss: 9.801473617553711\n",
      "Step 244: local_loss: 12.353200912475586\n",
      "Step 245: local_loss: 8.858067512512207\n",
      "Step 246: local_loss: 6.040736198425293\n",
      "Step 247: local_loss: 10.179105758666992\n",
      "Step 248: local_loss: 6.853553771972656\n",
      "Step 249: local_loss: 10.361533164978027\n",
      "Step 250: local_loss: 8.392996788024902\n",
      "Step 251: local_loss: 8.72030258178711\n",
      "Step 252: local_loss: 9.566845893859863\n",
      "Step 253: local_loss: 8.879671096801758\n",
      "Step 254: local_loss: 5.851625919342041\n",
      "Step 255: local_loss: 8.485607147216797\n",
      "Step 256: local_loss: 8.181325912475586\n",
      "Step 257: local_loss: 8.520846366882324\n",
      "Step 258: local_loss: 6.585423946380615\n",
      "Step 259: local_loss: 6.8528008460998535\n",
      "Step 260: local_loss: 8.193921089172363\n",
      "Step 261: local_loss: 5.178395748138428\n",
      "Step 262: local_loss: 6.73605489730835\n",
      "Step 263: local_loss: 7.393625259399414\n",
      "Step 264: local_loss: 6.945694446563721\n",
      "Step 265: local_loss: 10.527020454406738\n",
      "Step 266: local_loss: 9.030426025390625\n",
      "Step 267: local_loss: 5.816018104553223\n",
      "Step 268: local_loss: 9.04783821105957\n",
      "Step 269: local_loss: 7.951023101806641\n",
      "Step 270: local_loss: 5.350813865661621\n",
      "Step 271: local_loss: 8.369112014770508\n",
      "Step 272: local_loss: 8.449230194091797\n",
      "Step 273: local_loss: 8.80455207824707\n",
      "Step 274: local_loss: 7.9799275398254395\n",
      "Step 275: local_loss: 9.133841514587402\n",
      "Step 276: local_loss: 7.818539619445801\n",
      "Step 277: local_loss: 9.731691360473633\n",
      "Step 278: local_loss: 4.136554718017578\n",
      "Step 279: local_loss: 9.129002571105957\n",
      "Step 280: local_loss: 8.290648460388184\n",
      "Step 281: local_loss: 10.028008460998535\n",
      "Step 282: local_loss: 8.217485427856445\n",
      "Step 283: local_loss: 6.941219329833984\n",
      "Step 284: local_loss: 4.5249433517456055\n",
      "Step 285: local_loss: 13.28297233581543\n",
      "Step 286: local_loss: 6.71333122253418\n",
      "Step 287: local_loss: 7.969102382659912\n",
      "Step 288: local_loss: 6.411767959594727\n",
      "Step 289: local_loss: 4.600874423980713\n",
      "Step 290: local_loss: 6.058692932128906\n",
      "Step 291: local_loss: 7.592586517333984\n",
      "Step 292: local_loss: 9.793452262878418\n",
      "Step 293: local_loss: 7.517823696136475\n",
      "Step 294: local_loss: 6.273745059967041\n",
      "Step 295: local_loss: 6.265683650970459\n",
      "Step 296: local_loss: 10.029045104980469\n",
      "Step 297: local_loss: 8.026302337646484\n",
      "Step 298: local_loss: 8.316766738891602\n",
      "Step 299: local_loss: 8.337846755981445\n",
      "Center node 7617 finished at step 300 with loss 8.3378\n",
      "\n",
      "=== Pretraining Epoch 6 (Center Node: 297) ===\n",
      "Step 0: local_loss: 13.878005981445312\n",
      "Step 1: local_loss: 13.534990310668945\n",
      "Step 2: local_loss: 13.881769180297852\n",
      "Step 3: local_loss: 13.083663940429688\n",
      "Step 4: local_loss: 15.259201049804688\n",
      "Step 5: local_loss: 9.69636344909668\n",
      "Step 6: local_loss: 13.293045997619629\n",
      "Step 7: local_loss: 13.146678924560547\n",
      "Step 8: local_loss: 8.32529067993164\n",
      "Step 9: local_loss: 12.809385299682617\n",
      "Step 10: local_loss: 15.647820472717285\n",
      "Step 11: local_loss: 12.575248718261719\n",
      "Step 12: local_loss: 13.409404754638672\n",
      "Step 13: local_loss: 16.304298400878906\n",
      "Step 14: local_loss: 13.735400199890137\n",
      "Step 15: local_loss: 16.531627655029297\n",
      "Step 16: local_loss: 16.951873779296875\n",
      "Step 17: local_loss: 8.181554794311523\n",
      "Step 18: local_loss: 12.413729667663574\n",
      "Step 19: local_loss: 10.460899353027344\n",
      "Step 20: local_loss: 11.381081581115723\n",
      "Step 21: local_loss: 12.324642181396484\n",
      "Step 22: local_loss: 9.199457168579102\n",
      "Step 23: local_loss: 9.293000221252441\n",
      "Step 24: local_loss: 10.294883728027344\n",
      "Step 25: local_loss: 10.184155464172363\n",
      "Step 26: local_loss: 9.447484016418457\n",
      "Step 27: local_loss: 10.073694229125977\n",
      "Step 28: local_loss: 12.22494125366211\n",
      "Step 29: local_loss: 7.861415863037109\n",
      "Step 30: local_loss: 15.35746955871582\n",
      "Step 31: local_loss: 13.158782958984375\n",
      "Step 32: local_loss: 11.089151382446289\n",
      "Step 33: local_loss: 10.889963150024414\n",
      "Step 34: local_loss: 10.125222206115723\n",
      "Step 35: local_loss: 11.154411315917969\n",
      "Step 36: local_loss: 12.876110076904297\n",
      "Step 37: local_loss: 9.646334648132324\n",
      "Step 38: local_loss: 12.405619621276855\n",
      "Step 39: local_loss: 12.936910629272461\n",
      "Step 40: local_loss: 14.355605125427246\n",
      "Step 41: local_loss: 9.630977630615234\n",
      "Step 42: local_loss: 9.289678573608398\n",
      "Step 43: local_loss: 13.57406234741211\n",
      "Step 44: local_loss: 11.777745246887207\n",
      "Step 45: local_loss: 9.015289306640625\n",
      "Step 46: local_loss: 10.071993827819824\n",
      "Step 47: local_loss: 8.032445907592773\n",
      "Step 48: local_loss: 11.363452911376953\n",
      "Step 49: local_loss: 12.566742897033691\n",
      "Step 50: local_loss: 10.956413269042969\n",
      "Step 51: local_loss: 13.098987579345703\n",
      "Step 52: local_loss: 13.885088920593262\n",
      "Step 53: local_loss: 7.291040420532227\n",
      "Step 54: local_loss: 13.396748542785645\n",
      "Step 55: local_loss: 12.464040756225586\n",
      "Step 56: local_loss: 9.66080379486084\n",
      "Step 57: local_loss: 11.202927589416504\n",
      "Step 58: local_loss: 7.995492935180664\n",
      "Step 59: local_loss: 11.555265426635742\n",
      "Step 60: local_loss: 13.189799308776855\n",
      "Step 61: local_loss: 12.408110618591309\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 62: local_loss: 13.291808128356934\n",
      "Step 63: local_loss: 9.243700981140137\n",
      "Step 64: local_loss: 12.083330154418945\n",
      "Step 65: local_loss: 10.979039192199707\n",
      "Step 66: local_loss: 7.556898593902588\n",
      "Step 67: local_loss: 10.523242950439453\n",
      "Step 68: local_loss: 13.254785537719727\n",
      "Step 69: local_loss: 9.104992866516113\n",
      "Step 70: local_loss: 13.663986206054688\n",
      "Step 71: local_loss: 10.833895683288574\n",
      "Step 72: local_loss: 13.271164894104004\n",
      "Step 73: local_loss: 14.259257316589355\n",
      "Step 74: local_loss: 10.831567764282227\n",
      "Step 75: local_loss: 9.986103057861328\n",
      "Step 76: local_loss: 13.798538208007812\n",
      "Step 77: local_loss: 6.105127334594727\n",
      "Step 78: local_loss: 12.071771621704102\n",
      "Step 79: local_loss: 8.020700454711914\n",
      "Step 80: local_loss: 13.89111328125\n",
      "Step 81: local_loss: 11.835282325744629\n",
      "Step 82: local_loss: 11.602943420410156\n",
      "Step 83: local_loss: 11.11221981048584\n",
      "Step 84: local_loss: 12.133747100830078\n",
      "Step 85: local_loss: 11.706254005432129\n",
      "Step 86: local_loss: 14.101507186889648\n",
      "Step 87: local_loss: 12.418203353881836\n",
      "Step 88: local_loss: 10.3778715133667\n",
      "Step 89: local_loss: 11.319704055786133\n",
      "Step 90: local_loss: 11.378792762756348\n",
      "Step 91: local_loss: 8.79078483581543\n",
      "Step 92: local_loss: 11.617249488830566\n",
      "Step 93: local_loss: 10.071135520935059\n",
      "Step 94: local_loss: 10.793408393859863\n",
      "Step 95: local_loss: 10.813350677490234\n",
      "Step 96: local_loss: 10.473039627075195\n",
      "Step 97: local_loss: 9.516694068908691\n",
      "Step 98: local_loss: 10.922524452209473\n",
      "Step 99: local_loss: 18.466285705566406\n",
      "Step 100: local_loss: 10.748451232910156\n",
      "Step 101: local_loss: 8.6643648147583\n",
      "Step 102: local_loss: 7.900073528289795\n",
      "Step 103: local_loss: 11.854082107543945\n",
      "Step 104: local_loss: 10.177289009094238\n",
      "Step 105: local_loss: 16.088356018066406\n",
      "Step 106: local_loss: 9.56851863861084\n",
      "Step 107: local_loss: 10.150044441223145\n",
      "Step 108: local_loss: 13.022912979125977\n",
      "Step 109: local_loss: 10.99168872833252\n",
      "Step 110: local_loss: 6.90244197845459\n",
      "Step 111: local_loss: 10.499225616455078\n",
      "Step 112: local_loss: 17.28036117553711\n",
      "Step 113: local_loss: 9.283578872680664\n",
      "Step 114: local_loss: 15.704289436340332\n",
      "Step 115: local_loss: 16.810388565063477\n",
      "Step 116: local_loss: 8.393254280090332\n",
      "Step 117: local_loss: 12.90327262878418\n",
      "Step 118: local_loss: 8.753093719482422\n",
      "Step 119: local_loss: 12.600890159606934\n",
      "Step 120: local_loss: 13.048558235168457\n",
      "Step 121: local_loss: 11.162778854370117\n",
      "Step 122: local_loss: 11.740976333618164\n",
      "Step 123: local_loss: 14.896641731262207\n",
      "Step 124: local_loss: 7.385166645050049\n",
      "Step 125: local_loss: 12.638107299804688\n",
      "Step 126: local_loss: 12.072786331176758\n",
      "Step 127: local_loss: 12.641325950622559\n",
      "Step 128: local_loss: 11.658827781677246\n",
      "Step 129: local_loss: 10.6748685836792\n",
      "Step 130: local_loss: 11.42837142944336\n",
      "Step 131: local_loss: 11.023159980773926\n",
      "Step 132: local_loss: 9.422163009643555\n",
      "Step 133: local_loss: 9.58991527557373\n",
      "Step 134: local_loss: 9.706003189086914\n",
      "Step 135: local_loss: 11.814494132995605\n",
      "Step 136: local_loss: 10.19005012512207\n",
      "Step 137: local_loss: 12.393245697021484\n",
      "Step 138: local_loss: 9.916906356811523\n",
      "Step 139: local_loss: 10.647087097167969\n",
      "Step 140: local_loss: 8.72551155090332\n",
      "Step 141: local_loss: 12.922590255737305\n",
      "Step 142: local_loss: 12.898971557617188\n",
      "Step 143: local_loss: 14.876022338867188\n",
      "Step 144: local_loss: 11.446712493896484\n",
      "Step 145: local_loss: 10.100808143615723\n",
      "Step 146: local_loss: 12.973626136779785\n",
      "Step 147: local_loss: 10.354138374328613\n",
      "Step 148: local_loss: 10.174903869628906\n",
      "Step 149: local_loss: 11.063639640808105\n",
      "Step 150: local_loss: 9.591630935668945\n",
      "Step 151: local_loss: 8.847953796386719\n",
      "Step 152: local_loss: 8.457063674926758\n",
      "Step 153: local_loss: 10.750893592834473\n",
      "Step 154: local_loss: 8.105024337768555\n",
      "Step 155: local_loss: 9.078670501708984\n",
      "Step 156: local_loss: 12.883179664611816\n",
      "Step 157: local_loss: 11.335733413696289\n",
      "Step 158: local_loss: 12.313323020935059\n",
      "Step 159: local_loss: 10.5878324508667\n",
      "Step 160: local_loss: 10.220212936401367\n",
      "Step 161: local_loss: 11.62241268157959\n",
      "Step 162: local_loss: 11.367170333862305\n",
      "Step 163: local_loss: 9.187933921813965\n",
      "Step 164: local_loss: 8.496688842773438\n",
      "Step 165: local_loss: 11.211020469665527\n",
      "Step 166: local_loss: 11.225492477416992\n",
      "Step 167: local_loss: 10.141974449157715\n",
      "Step 168: local_loss: 10.129634857177734\n",
      "Step 169: local_loss: 11.738320350646973\n",
      "Step 170: local_loss: 9.75084114074707\n",
      "Step 171: local_loss: 10.958520889282227\n",
      "Step 172: local_loss: 14.957114219665527\n",
      "Step 173: local_loss: 11.444157600402832\n",
      "Step 174: local_loss: 11.959809303283691\n",
      "Step 175: local_loss: 9.428143501281738\n",
      "Step 176: local_loss: 10.510147094726562\n",
      "Step 177: local_loss: 10.434354782104492\n",
      "Step 178: local_loss: 9.690892219543457\n",
      "Step 179: local_loss: 12.138022422790527\n",
      "Step 180: local_loss: 8.53098201751709\n",
      "Step 181: local_loss: 12.795412063598633\n",
      "Step 182: local_loss: 12.001067161560059\n",
      "Step 183: local_loss: 11.039507865905762\n",
      "Step 184: local_loss: 11.72164249420166\n",
      "Step 185: local_loss: 10.22984790802002\n",
      "Step 186: local_loss: 7.190474987030029\n",
      "Step 187: local_loss: 8.36980152130127\n",
      "Step 188: local_loss: 8.178325653076172\n",
      "Step 189: local_loss: 9.011157989501953\n",
      "Step 190: local_loss: 11.533584594726562\n",
      "Step 191: local_loss: 10.687143325805664\n",
      "Step 192: local_loss: 12.593896865844727\n",
      "Step 193: local_loss: 8.277693748474121\n",
      "Step 194: local_loss: 10.50136947631836\n",
      "Step 195: local_loss: 11.655194282531738\n",
      "Step 196: local_loss: 8.920907020568848\n",
      "Step 197: local_loss: 8.97244930267334\n",
      "Step 198: local_loss: 9.966630935668945\n",
      "Step 199: local_loss: 14.250884056091309\n",
      "Step 200: local_loss: 6.892703533172607\n",
      "Step 201: local_loss: 9.866031646728516\n",
      "Step 202: local_loss: 13.04094409942627\n",
      "Step 203: local_loss: 8.750285148620605\n",
      "Step 204: local_loss: 9.56998348236084\n",
      "Step 205: local_loss: 8.127386093139648\n",
      "Step 206: local_loss: 11.005078315734863\n",
      "Step 207: local_loss: 7.200563907623291\n",
      "Step 208: local_loss: 9.713616371154785\n",
      "Step 209: local_loss: 7.778488636016846\n",
      "Step 210: local_loss: 11.188602447509766\n",
      "Step 211: local_loss: 12.862236022949219\n",
      "Step 212: local_loss: 8.253731727600098\n",
      "Step 213: local_loss: 10.570621490478516\n",
      "Step 214: local_loss: 9.743152618408203\n",
      "Step 215: local_loss: 9.477482795715332\n",
      "Step 216: local_loss: 11.071782112121582\n",
      "Step 217: local_loss: 9.45931625366211\n",
      "Step 218: local_loss: 9.224915504455566\n",
      "Step 219: local_loss: 10.23461627960205\n",
      "Step 220: local_loss: 11.381282806396484\n",
      "Step 221: local_loss: 10.915138244628906\n",
      "Step 222: local_loss: 9.171465873718262\n",
      "Step 223: local_loss: 13.939321517944336\n",
      "Step 224: local_loss: 11.325929641723633\n",
      "Step 225: local_loss: 9.651507377624512\n",
      "Step 226: local_loss: 9.081348419189453\n",
      "Step 227: local_loss: 10.512274742126465\n",
      "Step 228: local_loss: 12.995346069335938\n",
      "Step 229: local_loss: 9.938044548034668\n",
      "Step 230: local_loss: 8.327177047729492\n",
      "Step 231: local_loss: 11.164569854736328\n",
      "Step 232: local_loss: 9.56906509399414\n",
      "Step 233: local_loss: 8.190400123596191\n",
      "Step 234: local_loss: 9.987397193908691\n",
      "Step 235: local_loss: 10.523797988891602\n",
      "Step 236: local_loss: 9.298582077026367\n",
      "Step 237: local_loss: 10.46625804901123\n",
      "Step 238: local_loss: 8.558690071105957\n",
      "Step 239: local_loss: 9.692255020141602\n",
      "Step 240: local_loss: 8.665371894836426\n",
      "Step 241: local_loss: 9.547323226928711\n",
      "Step 242: local_loss: 7.1969828605651855\n",
      "Step 243: local_loss: 9.193596839904785\n",
      "Step 244: local_loss: 8.405074119567871\n",
      "Step 245: local_loss: 8.63957691192627\n",
      "Step 246: local_loss: 9.598462104797363\n",
      "Step 247: local_loss: 11.038178443908691\n",
      "Step 248: local_loss: 10.65781307220459\n",
      "Step 249: local_loss: 11.037618637084961\n",
      "Step 250: local_loss: 8.593953132629395\n",
      "Step 251: local_loss: 10.86249828338623\n",
      "Step 252: local_loss: 12.09160041809082\n",
      "Step 253: local_loss: 11.680953025817871\n",
      "Step 254: local_loss: 9.849567413330078\n",
      "Step 255: local_loss: 12.515490531921387\n",
      "Step 256: local_loss: 8.963854789733887\n",
      "Step 257: local_loss: 14.694782257080078\n",
      "Step 258: local_loss: 15.713561058044434\n",
      "Step 259: local_loss: 16.95378303527832\n",
      "Step 260: local_loss: 8.9194917678833\n",
      "Step 261: local_loss: 9.392023086547852\n",
      "Step 262: local_loss: 7.887283802032471\n",
      "Step 263: local_loss: 9.433876037597656\n",
      "Step 264: local_loss: 11.939404487609863\n",
      "Step 265: local_loss: 9.107400894165039\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 266: local_loss: 10.103044509887695\n",
      "Step 267: local_loss: 14.375482559204102\n",
      "Step 268: local_loss: 9.876727104187012\n",
      "Step 269: local_loss: 7.469841957092285\n",
      "Step 270: local_loss: 6.40897798538208\n",
      "Step 271: local_loss: 10.794551849365234\n",
      "Step 272: local_loss: 8.825926780700684\n",
      "Step 273: local_loss: 6.752370357513428\n",
      "Step 274: local_loss: 7.837014198303223\n",
      "Step 275: local_loss: 7.159355640411377\n",
      "Step 276: local_loss: 9.107281684875488\n",
      "Step 277: local_loss: 12.522377014160156\n",
      "Step 278: local_loss: 9.218009948730469\n",
      "Step 279: local_loss: 9.228222846984863\n",
      "Step 280: local_loss: 6.826394557952881\n",
      "Step 281: local_loss: 8.52059268951416\n",
      "Step 282: local_loss: 9.360859870910645\n",
      "Step 283: local_loss: 9.109124183654785\n",
      "Step 284: local_loss: 9.01506233215332\n",
      "Step 285: local_loss: 8.581157684326172\n",
      "Step 286: local_loss: 8.368325233459473\n",
      "Step 287: local_loss: 11.69194221496582\n",
      "Step 288: local_loss: 9.49073600769043\n",
      "Step 289: local_loss: 10.631564140319824\n",
      "Step 290: local_loss: 9.787103652954102\n",
      "Step 291: local_loss: 8.339395523071289\n",
      "Step 292: local_loss: 11.985817909240723\n",
      "Step 293: local_loss: 6.573463439941406\n",
      "Step 294: local_loss: 9.07483959197998\n",
      "Step 295: local_loss: 8.8567533493042\n",
      "Step 296: local_loss: 7.926840782165527\n",
      "Step 297: local_loss: 9.401923179626465\n",
      "Step 298: local_loss: 10.508097648620605\n",
      "Step 299: local_loss: 10.193807601928711\n",
      "Center node 297 finished at step 300 with loss 10.1938\n",
      "\n",
      "=== Pretraining Epoch 7 (Center Node: 10280) ===\n",
      "Step 0: local_loss: 8.054282188415527\n",
      "Step 1: local_loss: 7.049309730529785\n",
      "Step 2: local_loss: 9.426631927490234\n",
      "Step 3: local_loss: 12.31950569152832\n",
      "Step 4: local_loss: 9.121176719665527\n",
      "Step 5: local_loss: 9.73740005493164\n",
      "Step 6: local_loss: 10.578158378601074\n",
      "Step 7: local_loss: 9.062079429626465\n",
      "Step 8: local_loss: 6.688329696655273\n",
      "Step 9: local_loss: 9.964969635009766\n",
      "Step 10: local_loss: 9.480432510375977\n",
      "Step 11: local_loss: 7.299556255340576\n",
      "Step 12: local_loss: 8.506317138671875\n",
      "Step 13: local_loss: 10.288546562194824\n",
      "Step 14: local_loss: 12.374918937683105\n",
      "Step 15: local_loss: 8.029152870178223\n",
      "Step 16: local_loss: 10.112629890441895\n",
      "Step 17: local_loss: 7.333058834075928\n",
      "Step 18: local_loss: 8.746545791625977\n",
      "Step 19: local_loss: 7.4359869956970215\n",
      "Step 20: local_loss: 9.548254013061523\n",
      "Step 21: local_loss: 7.470043659210205\n",
      "Step 22: local_loss: 10.476786613464355\n",
      "Step 23: local_loss: 6.912332534790039\n",
      "Step 24: local_loss: 10.77344799041748\n",
      "Step 25: local_loss: 10.921914100646973\n",
      "Step 26: local_loss: 9.360627174377441\n",
      "Step 27: local_loss: 13.378884315490723\n",
      "Step 28: local_loss: 10.340705871582031\n",
      "Step 29: local_loss: 11.081323623657227\n",
      "Step 30: local_loss: 8.291240692138672\n",
      "Step 31: local_loss: 10.740129470825195\n",
      "Step 32: local_loss: 8.438108444213867\n",
      "Step 33: local_loss: 9.882237434387207\n",
      "Step 34: local_loss: 9.833150863647461\n",
      "Step 35: local_loss: 14.255745887756348\n",
      "Step 36: local_loss: 11.931636810302734\n",
      "Step 37: local_loss: 8.063104629516602\n",
      "Step 38: local_loss: 8.096866607666016\n",
      "Step 39: local_loss: 8.67595100402832\n",
      "Step 40: local_loss: 9.791598320007324\n",
      "Step 41: local_loss: 10.314996719360352\n",
      "Step 42: local_loss: 8.030285835266113\n",
      "Step 43: local_loss: 10.035318374633789\n",
      "Step 44: local_loss: 8.420462608337402\n",
      "Step 45: local_loss: 7.697271823883057\n",
      "Step 46: local_loss: 10.103084564208984\n",
      "Step 47: local_loss: 11.565136909484863\n",
      "Step 48: local_loss: 10.73869514465332\n",
      "Step 49: local_loss: 8.181599617004395\n",
      "Step 50: local_loss: 10.932879447937012\n",
      "Step 51: local_loss: 7.055325984954834\n",
      "Step 52: local_loss: 9.38316535949707\n",
      "Step 53: local_loss: 10.783942222595215\n",
      "Step 54: local_loss: 10.318402290344238\n",
      "Step 55: local_loss: 7.6716628074646\n",
      "Step 56: local_loss: 8.601264953613281\n",
      "Step 57: local_loss: 8.390976905822754\n",
      "Step 58: local_loss: 10.25105094909668\n",
      "Step 59: local_loss: 8.283699989318848\n",
      "Step 60: local_loss: 7.954024314880371\n",
      "Step 61: local_loss: 5.972538471221924\n",
      "Step 62: local_loss: 9.862475395202637\n",
      "Step 63: local_loss: 9.044878959655762\n",
      "Step 64: local_loss: 10.996859550476074\n",
      "Step 65: local_loss: 9.68891716003418\n",
      "Step 66: local_loss: 10.79008674621582\n",
      "Step 67: local_loss: 7.033543109893799\n",
      "Step 68: local_loss: 7.307104587554932\n",
      "Step 69: local_loss: 7.9284563064575195\n",
      "Step 70: local_loss: 9.527253150939941\n",
      "Step 71: local_loss: 12.24260425567627\n",
      "Step 72: local_loss: 7.9588189125061035\n",
      "Step 73: local_loss: 8.953180313110352\n",
      "Step 74: local_loss: 11.29676628112793\n",
      "Step 75: local_loss: 8.04564380645752\n",
      "Step 76: local_loss: 9.049400329589844\n",
      "Step 77: local_loss: 11.272741317749023\n",
      "Step 78: local_loss: 10.15265941619873\n",
      "Step 79: local_loss: 8.457711219787598\n",
      "Step 80: local_loss: 10.114459991455078\n",
      "Step 81: local_loss: 7.699955463409424\n",
      "Step 82: local_loss: 7.563399314880371\n",
      "Step 83: local_loss: 8.955653190612793\n",
      "Step 84: local_loss: 11.363556861877441\n",
      "Step 85: local_loss: 10.524266242980957\n",
      "Step 86: local_loss: 9.246088027954102\n",
      "Step 87: local_loss: 9.346259117126465\n",
      "Step 88: local_loss: 9.505013465881348\n",
      "Step 89: local_loss: 10.241506576538086\n",
      "Step 90: local_loss: 6.3213324546813965\n",
      "Step 91: local_loss: 7.404004096984863\n",
      "Step 92: local_loss: 9.174269676208496\n",
      "Step 93: local_loss: 12.233236312866211\n",
      "Step 94: local_loss: 8.369770050048828\n",
      "Step 95: local_loss: 8.53805923461914\n",
      "Step 96: local_loss: 9.436273574829102\n",
      "Step 97: local_loss: 9.722528457641602\n",
      "Step 98: local_loss: 8.01452922821045\n",
      "Step 99: local_loss: 9.260799407958984\n",
      "Step 100: local_loss: 8.718794822692871\n",
      "Step 101: local_loss: 8.469243049621582\n",
      "Step 102: local_loss: 12.014888763427734\n",
      "Step 103: local_loss: 9.394207954406738\n",
      "Step 104: local_loss: 8.793542861938477\n",
      "Step 105: local_loss: 9.63886833190918\n",
      "Step 106: local_loss: 10.014315605163574\n",
      "Step 107: local_loss: 8.01866340637207\n",
      "Step 108: local_loss: 9.917943000793457\n",
      "Step 109: local_loss: 7.609738349914551\n",
      "Step 110: local_loss: 8.450844764709473\n",
      "Step 111: local_loss: 12.346051216125488\n",
      "Step 112: local_loss: 8.644468307495117\n",
      "Step 113: local_loss: 7.727044105529785\n",
      "Step 114: local_loss: 8.110681533813477\n",
      "Step 115: local_loss: 8.30958366394043\n",
      "Step 116: local_loss: 8.324383735656738\n",
      "Step 117: local_loss: 11.619694709777832\n",
      "Step 118: local_loss: 8.786282539367676\n",
      "Step 119: local_loss: 8.18400764465332\n",
      "Step 120: local_loss: 10.72626781463623\n",
      "Step 121: local_loss: 10.128437995910645\n",
      "Step 122: local_loss: 6.647252559661865\n",
      "Step 123: local_loss: 8.349786758422852\n",
      "Step 124: local_loss: 8.980542182922363\n",
      "Step 125: local_loss: 10.965241432189941\n",
      "Step 126: local_loss: 9.870732307434082\n",
      "Step 127: local_loss: 9.270956039428711\n",
      "Step 128: local_loss: 11.483393669128418\n",
      "Step 129: local_loss: 9.690605163574219\n",
      "Step 130: local_loss: 12.043787002563477\n",
      "Step 131: local_loss: 7.1920976638793945\n",
      "Step 132: local_loss: 9.871661186218262\n",
      "Step 133: local_loss: 10.032235145568848\n",
      "Step 134: local_loss: 7.383397102355957\n",
      "Step 135: local_loss: 8.466259956359863\n",
      "Step 136: local_loss: 8.257490158081055\n",
      "Step 137: local_loss: 9.282278060913086\n",
      "Step 138: local_loss: 8.45030689239502\n",
      "Step 139: local_loss: 9.825010299682617\n",
      "Step 140: local_loss: 7.264927387237549\n",
      "Step 141: local_loss: 11.327856063842773\n",
      "Step 142: local_loss: 8.949139595031738\n",
      "Step 143: local_loss: 7.3426737785339355\n",
      "Step 144: local_loss: 9.575054168701172\n",
      "Step 145: local_loss: 8.368748664855957\n",
      "Step 146: local_loss: 9.41506576538086\n",
      "Step 147: local_loss: 8.195673942565918\n",
      "Step 148: local_loss: 8.73306941986084\n",
      "Step 149: local_loss: 9.135547637939453\n",
      "Step 150: local_loss: 7.3283162117004395\n",
      "Step 151: local_loss: 7.507505893707275\n",
      "Step 152: local_loss: 7.583046913146973\n",
      "Step 153: local_loss: 12.741329193115234\n",
      "Step 154: local_loss: 8.781317710876465\n",
      "Step 155: local_loss: 8.938852310180664\n",
      "Step 156: local_loss: 6.951991081237793\n",
      "Step 157: local_loss: 10.973552703857422\n",
      "Step 158: local_loss: 9.216310501098633\n",
      "Step 159: local_loss: 13.220987319946289\n",
      "Step 160: local_loss: 9.871755599975586\n",
      "Step 161: local_loss: 8.139416694641113\n",
      "Step 162: local_loss: 6.608273506164551\n",
      "Step 163: local_loss: 8.322651863098145\n",
      "Step 164: local_loss: 8.911359786987305\n",
      "Step 165: local_loss: 9.398682594299316\n",
      "Step 166: local_loss: 12.067987442016602\n",
      "Step 167: local_loss: 8.271684646606445\n",
      "Step 168: local_loss: 8.919061660766602\n",
      "Step 169: local_loss: 8.133145332336426\n",
      "Step 170: local_loss: 10.315301895141602\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 171: local_loss: 9.238398551940918\n",
      "Step 172: local_loss: 9.539369583129883\n",
      "Step 173: local_loss: 9.194730758666992\n",
      "Step 174: local_loss: 7.513513088226318\n",
      "Step 175: local_loss: 6.724207878112793\n",
      "Step 176: local_loss: 8.883722305297852\n",
      "Step 181: local_loss: 8.07938289642334\n",
      "Step 182: local_loss: 7.873765468597412\n",
      "Step 183: local_loss: 7.337756633758545\n",
      "Step 184: local_loss: 8.264907836914062\n",
      "Step 185: local_loss: 13.73470687866211\n",
      "Step 186: local_loss: 5.846652030944824\n",
      "Step 187: local_loss: 6.956971645355225\n",
      "Step 188: local_loss: 7.28888463973999\n",
      "Step 189: local_loss: 8.103221893310547\n",
      "Step 190: local_loss: 6.248250484466553\n",
      "Step 191: local_loss: 8.42292594909668\n",
      "Step 192: local_loss: 7.754755973815918\n",
      "Step 193: local_loss: 10.677557945251465\n",
      "Step 194: local_loss: 9.490144729614258\n",
      "Step 195: local_loss: 10.022151947021484\n",
      "Step 196: local_loss: 11.54255199432373\n",
      "Step 197: local_loss: 7.482792854309082\n",
      "Step 198: local_loss: 8.617504119873047\n",
      "Step 199: local_loss: 12.234444618225098\n",
      "Step 200: local_loss: 7.660469055175781\n",
      "Step 201: local_loss: 7.485743522644043\n",
      "Step 202: local_loss: 11.612846374511719\n",
      "Step 203: local_loss: 11.09082317352295\n",
      "Step 204: local_loss: 7.76943302154541\n",
      "Step 205: local_loss: 9.198938369750977\n",
      "Step 206: local_loss: 7.50985860824585\n",
      "Step 207: local_loss: 9.932126998901367\n",
      "Step 208: local_loss: 9.019929885864258\n",
      "Step 209: local_loss: 10.789708137512207\n",
      "Step 210: local_loss: 8.168035507202148\n",
      "Step 211: local_loss: 9.360681533813477\n",
      "Step 212: local_loss: 9.408158302307129\n",
      "Step 213: local_loss: 10.856019020080566\n",
      "Step 214: local_loss: 9.710600852966309\n",
      "Step 215: local_loss: 10.456226348876953\n",
      "Step 216: local_loss: 10.03827953338623\n",
      "Step 217: local_loss: 9.673779487609863\n",
      "Step 218: local_loss: 8.664880752563477\n",
      "Step 219: local_loss: 8.361783027648926\n",
      "Step 220: local_loss: 9.708813667297363\n",
      "Step 221: local_loss: 9.623041152954102\n",
      "Step 222: local_loss: 8.01537799835205\n",
      "Step 223: local_loss: 8.032065391540527\n",
      "Step 224: local_loss: 8.853569984436035\n",
      "Step 225: local_loss: 8.534701347351074\n",
      "Step 226: local_loss: 6.90077018737793\n",
      "Step 227: local_loss: 7.364964962005615\n",
      "Step 228: local_loss: 7.662870407104492\n",
      "Step 229: local_loss: 9.264954566955566\n",
      "Step 230: local_loss: 11.628294944763184\n",
      "Step 231: local_loss: 7.913922309875488\n",
      "Step 232: local_loss: 10.090649604797363\n",
      "Step 233: local_loss: 7.2656731605529785\n",
      "Step 234: local_loss: 7.99653434753418\n",
      "Step 235: local_loss: 8.408061027526855\n",
      "Step 236: local_loss: 8.223834037780762\n",
      "Step 237: local_loss: 9.136955261230469\n",
      "Step 238: local_loss: 9.899744987487793\n",
      "Step 239: local_loss: 10.312639236450195\n",
      "Step 240: local_loss: 6.815633296966553\n",
      "Step 241: local_loss: 7.78911018371582\n",
      "Step 242: local_loss: 8.442964553833008\n",
      "Step 243: local_loss: 8.91333293914795\n",
      "Step 244: local_loss: 9.416862487792969\n",
      "Step 245: local_loss: 7.300610542297363\n",
      "Step 246: local_loss: 10.624307632446289\n",
      "Step 247: local_loss: 7.284721851348877\n",
      "Step 248: local_loss: 10.775712966918945\n",
      "Step 249: local_loss: 10.12326717376709\n",
      "Step 250: local_loss: 5.887439250946045\n",
      "Step 251: local_loss: 7.490283489227295\n",
      "Step 252: local_loss: 8.137619018554688\n",
      "Step 253: local_loss: 10.675333023071289\n",
      "Step 254: local_loss: 7.753442287445068\n",
      "Step 255: local_loss: 10.066539764404297\n",
      "Step 256: local_loss: 7.748103141784668\n",
      "Step 257: local_loss: 9.507905006408691\n",
      "Step 258: local_loss: 6.126621723175049\n",
      "Step 259: local_loss: 9.388653755187988\n",
      "Step 260: local_loss: 7.381729602813721\n",
      "Step 261: local_loss: 8.55847454071045\n",
      "Step 262: local_loss: 9.174246788024902\n",
      "Step 263: local_loss: 8.922107696533203\n",
      "Step 264: local_loss: 10.215951919555664\n",
      "Step 265: local_loss: 7.5798139572143555\n",
      "Step 266: local_loss: 7.4886698722839355\n",
      "Step 267: local_loss: 11.149035453796387\n",
      "Step 268: local_loss: 7.808409214019775\n",
      "Step 269: local_loss: 8.757580757141113\n",
      "Step 270: local_loss: 8.992485046386719\n",
      "Step 271: local_loss: 7.83219051361084\n",
      "Step 272: local_loss: 9.66384506225586\n",
      "Step 273: local_loss: 7.759762763977051\n",
      "Step 274: local_loss: 9.91142463684082\n",
      "Step 275: local_loss: 10.035689353942871\n",
      "Step 276: local_loss: 8.026152610778809\n",
      "Step 277: local_loss: 8.37386703491211\n",
      "Step 278: local_loss: 8.092340469360352\n",
      "Step 279: local_loss: 10.639938354492188\n",
      "Step 280: local_loss: 7.297327041625977\n",
      "Step 281: local_loss: 8.02805233001709\n",
      "Step 282: local_loss: 8.875385284423828\n",
      "Step 283: local_loss: 7.836738586425781\n",
      "Step 284: local_loss: 6.831245422363281\n",
      "Step 285: local_loss: 5.124497890472412\n",
      "Step 286: local_loss: 11.834582328796387\n",
      "Step 287: local_loss: 9.984577178955078\n",
      "Step 288: local_loss: 9.588356971740723\n",
      "Step 289: local_loss: 7.933169364929199\n",
      "Step 290: local_loss: 7.239158630371094\n",
      "Step 291: local_loss: 7.99153470993042\n",
      "Step 292: local_loss: 9.0271577835083\n",
      "Step 293: local_loss: 9.753538131713867\n",
      "Step 294: local_loss: 7.350142002105713\n",
      "Step 295: local_loss: 9.23859691619873\n",
      "Step 296: local_loss: 8.96475887298584\n",
      "Step 297: local_loss: 8.608991622924805\n",
      "Step 298: local_loss: 9.754679679870605\n",
      "Step 299: local_loss: 9.167686462402344\n",
      "Center node 10280 finished at step 300 with loss 9.1677\n",
      "\n",
      "=== Pretraining Epoch 8 (Center Node: 4189) ===\n",
      "Step 0: local_loss: 11.26318359375\n",
      "Step 1: local_loss: 11.324667930603027\n",
      "Step 2: local_loss: 9.402193069458008\n",
      "Step 3: local_loss: 11.898399353027344\n",
      "Step 4: local_loss: 12.837650299072266\n",
      "Step 5: local_loss: 12.155421257019043\n",
      "Step 6: local_loss: 7.562615871429443\n",
      "Step 7: local_loss: 9.629688262939453\n",
      "Step 8: local_loss: 10.6990966796875\n",
      "Step 9: local_loss: 11.628284454345703\n",
      "Step 10: local_loss: 13.247665405273438\n",
      "Step 11: local_loss: 12.951194763183594\n",
      "Step 12: local_loss: 10.693317413330078\n",
      "Step 13: local_loss: 10.630823135375977\n",
      "Step 14: local_loss: 10.983559608459473\n",
      "Step 15: local_loss: 9.985943794250488\n",
      "Step 16: local_loss: 11.459579467773438\n",
      "Step 17: local_loss: 12.636190414428711\n",
      "Step 18: local_loss: 8.171159744262695\n",
      "Step 19: local_loss: 12.3522367477417\n",
      "Step 20: local_loss: 13.317602157592773\n",
      "Step 21: local_loss: 11.508718490600586\n",
      "Step 22: local_loss: 12.61119270324707\n",
      "Step 23: local_loss: 11.824203491210938\n",
      "Step 24: local_loss: 8.906928062438965\n",
      "Step 25: local_loss: 7.8913164138793945\n",
      "Step 26: local_loss: 8.897069931030273\n",
      "Step 27: local_loss: 10.14087200164795\n",
      "Step 28: local_loss: 10.083451271057129\n",
      "Step 29: local_loss: 8.655691146850586\n",
      "Step 30: local_loss: 9.578487396240234\n",
      "Step 31: local_loss: 9.25901985168457\n",
      "Step 32: local_loss: 11.408466339111328\n",
      "Step 33: local_loss: 7.780807018280029\n",
      "Step 34: local_loss: 10.76711654663086\n",
      "Step 35: local_loss: 9.10218620300293\n",
      "Step 36: local_loss: 7.554070949554443\n",
      "Step 37: local_loss: 7.257600784301758\n",
      "Step 38: local_loss: 11.771265029907227\n",
      "Step 39: local_loss: 11.185636520385742\n",
      "Step 40: local_loss: 6.456361293792725\n",
      "Step 41: local_loss: 9.294637680053711\n",
      "Step 42: local_loss: 9.001622200012207\n",
      "Step 43: local_loss: 9.94005298614502\n",
      "Step 44: local_loss: 9.629862785339355\n",
      "Step 45: local_loss: 9.106112480163574\n",
      "Step 46: local_loss: 7.845654487609863\n",
      "Step 47: local_loss: 9.916016578674316\n",
      "Step 48: local_loss: 15.295158386230469\n",
      "Step 49: local_loss: 9.17534065246582\n",
      "Step 50: local_loss: 12.7645263671875\n",
      "Step 51: local_loss: 8.158550262451172\n",
      "Step 52: local_loss: 7.185729026794434\n",
      "Step 53: local_loss: 8.80695629119873\n",
      "Step 54: local_loss: 6.318194389343262\n",
      "Step 55: local_loss: 9.374451637268066\n",
      "Step 56: local_loss: 10.004927635192871\n",
      "Step 57: local_loss: 12.951868057250977\n",
      "Step 58: local_loss: 10.586729049682617\n",
      "Step 59: local_loss: 9.163385391235352\n",
      "Step 60: local_loss: 7.123809337615967\n",
      "Step 61: local_loss: 8.286127090454102\n",
      "Step 62: local_loss: 9.213153839111328\n",
      "Step 63: local_loss: 10.875190734863281\n",
      "Step 64: local_loss: 9.311260223388672\n",
      "Step 65: local_loss: 10.163641929626465\n",
      "Step 66: local_loss: 7.765453815460205\n",
      "Step 67: local_loss: 7.175294399261475\n",
      "Step 68: local_loss: 8.590108871459961\n",
      "Step 69: local_loss: 7.474461078643799\n",
      "Step 70: local_loss: 10.261292457580566\n",
      "Step 71: local_loss: 10.884946823120117\n",
      "Step 72: local_loss: 8.931944847106934\n",
      "Step 73: local_loss: 10.063779830932617\n",
      "Step 74: local_loss: 6.169332504272461\n",
      "Step 75: local_loss: 9.059536933898926\n",
      "Step 76: local_loss: 9.00725269317627\n",
      "Step 77: local_loss: 6.502910137176514\n",
      "Step 78: local_loss: 9.648346900939941\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 79: local_loss: 11.47799301147461\n",
      "Step 80: local_loss: 10.108076095581055\n",
      "Step 81: local_loss: 10.991312980651855\n",
      "Step 82: local_loss: 9.097896575927734\n",
      "Step 83: local_loss: 11.369247436523438\n",
      "Step 84: local_loss: 8.503866195678711\n",
      "Step 85: local_loss: 13.82740306854248\n",
      "Step 86: local_loss: 7.847480773925781\n",
      "Step 87: local_loss: 8.36434268951416\n",
      "Step 88: local_loss: 9.640134811401367\n",
      "Step 89: local_loss: 6.910409450531006\n",
      "Step 90: local_loss: 8.483800888061523\n",
      "Step 91: local_loss: 12.443209648132324\n",
      "Step 92: local_loss: 7.275618553161621\n",
      "Step 93: local_loss: 8.374249458312988\n",
      "Step 94: local_loss: 8.814844131469727\n",
      "Step 95: local_loss: 9.486703872680664\n",
      "Step 96: local_loss: 9.58370590209961\n",
      "Step 97: local_loss: 6.2171759605407715\n",
      "Step 98: local_loss: 9.22580623626709\n",
      "Step 99: local_loss: 7.970889091491699\n",
      "Step 100: local_loss: 6.514233589172363\n",
      "Step 101: local_loss: 6.288114547729492\n",
      "Step 102: local_loss: 8.671853065490723\n",
      "Step 103: local_loss: 7.864522933959961\n",
      "Step 104: local_loss: 11.526225090026855\n",
      "Step 105: local_loss: 8.397781372070312\n",
      "Step 106: local_loss: 6.867977619171143\n",
      "Step 107: local_loss: 9.713896751403809\n",
      "Step 108: local_loss: 9.062089920043945\n",
      "Step 109: local_loss: 6.850495338439941\n",
      "Step 110: local_loss: 8.402987480163574\n",
      "Step 111: local_loss: 6.735164165496826\n",
      "Step 112: local_loss: 7.606644630432129\n",
      "Step 113: local_loss: 7.596896648406982\n",
      "Step 114: local_loss: 5.176031589508057\n",
      "Step 115: local_loss: 8.868770599365234\n",
      "Step 116: local_loss: 6.561655521392822\n",
      "Step 117: local_loss: 6.212316989898682\n",
      "Step 118: local_loss: 9.308728218078613\n",
      "Step 119: local_loss: 8.0287504196167\n",
      "Step 120: local_loss: 5.463557720184326\n",
      "Step 121: local_loss: 7.553305149078369\n",
      "Step 122: local_loss: 6.814523696899414\n",
      "Step 123: local_loss: 6.416109561920166\n",
      "Step 124: local_loss: 6.6793670654296875\n",
      "Step 125: local_loss: 6.272775173187256\n",
      "Step 126: local_loss: 4.843507289886475\n",
      "Step 127: local_loss: 9.378421783447266\n",
      "Step 128: local_loss: 7.305596828460693\n",
      "Step 129: local_loss: 4.769956588745117\n",
      "Step 130: local_loss: 7.037210941314697\n",
      "Step 131: local_loss: 6.620328426361084\n",
      "Step 132: local_loss: 4.893731117248535\n",
      "Step 133: local_loss: 12.906046867370605\n",
      "Step 134: local_loss: 7.996410846710205\n",
      "Step 135: local_loss: 5.905375003814697\n",
      "Step 136: local_loss: 8.468046188354492\n",
      "Step 137: local_loss: 6.113516330718994\n",
      "Step 138: local_loss: 5.137661457061768\n",
      "Step 139: local_loss: 6.084097385406494\n",
      "Step 140: local_loss: 5.8135905265808105\n",
      "Step 141: local_loss: 5.291052341461182\n",
      "Step 142: local_loss: 7.449719429016113\n",
      "Step 143: local_loss: 8.292182922363281\n",
      "Step 144: local_loss: 6.842994213104248\n",
      "Step 145: local_loss: 8.925341606140137\n",
      "Step 146: local_loss: 11.018041610717773\n",
      "Step 147: local_loss: 5.773353576660156\n",
      "Step 148: local_loss: 8.500646591186523\n",
      "Step 149: local_loss: 5.783683776855469\n",
      "Step 150: local_loss: 7.994166851043701\n",
      "Step 151: local_loss: 9.656122207641602\n",
      "Step 152: local_loss: 7.607928276062012\n",
      "Step 153: local_loss: 7.398314952850342\n",
      "Step 154: local_loss: 7.764307498931885\n",
      "Step 155: local_loss: 8.257697105407715\n",
      "Step 156: local_loss: 6.478662014007568\n",
      "Step 157: local_loss: 7.751179218292236\n",
      "Step 158: local_loss: 8.16795825958252\n",
      "Step 159: local_loss: 6.88599157333374\n",
      "Step 160: local_loss: 7.08990478515625\n",
      "Step 161: local_loss: 6.955203533172607\n",
      "Step 162: local_loss: 6.542419910430908\n",
      "Step 163: local_loss: 7.312117099761963\n",
      "Step 164: local_loss: 6.226009368896484\n",
      "Step 165: local_loss: 6.852250576019287\n",
      "Step 166: local_loss: 8.164852142333984\n",
      "Step 167: local_loss: 7.632005214691162\n",
      "Step 168: local_loss: 6.28303861618042\n",
      "Step 169: local_loss: 4.897537708282471\n",
      "Step 170: local_loss: 6.228384017944336\n",
      "Step 171: local_loss: 5.492880821228027\n",
      "Step 172: local_loss: 7.840859413146973\n",
      "Step 173: local_loss: 4.823144912719727\n",
      "Step 174: local_loss: 7.28915548324585\n",
      "Step 175: local_loss: 5.296550750732422\n",
      "Step 176: local_loss: 5.021549224853516\n",
      "Step 177: local_loss: 6.9524736404418945\n",
      "Step 178: local_loss: 6.286876678466797\n",
      "Step 179: local_loss: 5.883884429931641\n",
      "Step 180: local_loss: 6.232010364532471\n",
      "Step 181: local_loss: 8.463534355163574\n",
      "Step 182: local_loss: 7.529122352600098\n",
      "Step 187: local_loss: 7.679947376251221\n",
      "Step 188: local_loss: 7.549134731292725\n",
      "Step 189: local_loss: 5.717484951019287\n",
      "Step 190: local_loss: 6.99213171005249\n",
      "Step 191: local_loss: 6.2477641105651855\n",
      "Step 192: local_loss: 7.03410530090332\n",
      "Step 193: local_loss: 4.782902240753174\n",
      "Step 194: local_loss: 8.913374900817871\n",
      "Step 195: local_loss: 6.020391941070557\n",
      "Step 196: local_loss: 4.80478572845459\n",
      "Step 197: local_loss: 6.705356121063232\n",
      "Step 198: local_loss: 5.9770731925964355\n",
      "Step 199: local_loss: 5.663078784942627\n",
      "Step 200: local_loss: 5.58479118347168\n",
      "Step 201: local_loss: 6.54026460647583\n",
      "Step 202: local_loss: 8.29647445678711\n",
      "Step 203: local_loss: 6.226174831390381\n",
      "Step 204: local_loss: 10.826574325561523\n",
      "Step 205: local_loss: 9.444364547729492\n",
      "Step 206: local_loss: 7.247705459594727\n",
      "Step 207: local_loss: 7.581499099731445\n",
      "Step 208: local_loss: 8.126296043395996\n",
      "Step 209: local_loss: 6.532872676849365\n",
      "Step 210: local_loss: 5.086203098297119\n",
      "Step 211: local_loss: 6.886697292327881\n",
      "Step 212: local_loss: 6.251048564910889\n",
      "Step 213: local_loss: 8.730555534362793\n",
      "Step 214: local_loss: 6.059574604034424\n",
      "Step 215: local_loss: 8.966325759887695\n",
      "Step 216: local_loss: 5.701906681060791\n",
      "Step 217: local_loss: 6.33650016784668\n",
      "Step 218: local_loss: 7.817927837371826\n",
      "Step 219: local_loss: 7.052121639251709\n",
      "Step 220: local_loss: 5.702493190765381\n",
      "Step 221: local_loss: 5.374724388122559\n",
      "Step 222: local_loss: 6.322324752807617\n",
      "Step 223: local_loss: 8.295234680175781\n",
      "Step 224: local_loss: 6.522176265716553\n",
      "Step 225: local_loss: 7.460253715515137\n",
      "Step 226: local_loss: 4.2414398193359375\n",
      "Step 227: local_loss: 6.897943496704102\n",
      "Step 228: local_loss: 8.727420806884766\n",
      "Step 229: local_loss: 4.840058326721191\n",
      "Step 230: local_loss: 8.043038368225098\n",
      "Step 231: local_loss: 5.467284202575684\n",
      "Step 232: local_loss: 7.654506206512451\n",
      "Step 233: local_loss: 7.23586893081665\n",
      "Step 234: local_loss: 6.094318866729736\n",
      "Step 235: local_loss: 5.295786380767822\n",
      "Step 236: local_loss: 6.740086555480957\n",
      "Step 237: local_loss: 7.936592102050781\n",
      "Step 238: local_loss: 7.866469860076904\n",
      "Step 239: local_loss: 6.999751091003418\n",
      "Step 240: local_loss: 6.121704578399658\n",
      "Step 241: local_loss: 6.740027904510498\n",
      "Step 242: local_loss: 5.9533305168151855\n",
      "Step 243: local_loss: 7.701873302459717\n",
      "Step 244: local_loss: 9.469975471496582\n",
      "Step 245: local_loss: 5.659491062164307\n",
      "Step 246: local_loss: 5.823995590209961\n",
      "Step 247: local_loss: 6.059932231903076\n",
      "Step 248: local_loss: 7.851814270019531\n",
      "Step 249: local_loss: 7.972836494445801\n",
      "Step 250: local_loss: 6.17717170715332\n",
      "Step 251: local_loss: 9.237174987792969\n",
      "Step 252: local_loss: 6.429960250854492\n",
      "Step 253: local_loss: 6.090242862701416\n",
      "Step 254: local_loss: 6.038525581359863\n",
      "Step 255: local_loss: 6.88278865814209\n",
      "Step 256: local_loss: 4.699477195739746\n",
      "Step 257: local_loss: 5.726412296295166\n",
      "Step 258: local_loss: 6.979889392852783\n",
      "Step 259: local_loss: 5.937618255615234\n",
      "Step 260: local_loss: 8.185009002685547\n",
      "Step 261: local_loss: 5.188616752624512\n",
      "Step 262: local_loss: 6.327007293701172\n",
      "Step 263: local_loss: 7.5676374435424805\n",
      "Step 264: local_loss: 7.938318729400635\n",
      "Step 265: local_loss: 6.074336051940918\n",
      "Step 266: local_loss: 8.4466552734375\n",
      "Step 267: local_loss: 6.633378505706787\n",
      "Step 268: local_loss: 6.3447113037109375\n",
      "Step 269: local_loss: 6.677555561065674\n",
      "Step 270: local_loss: 7.520778656005859\n",
      "Step 271: local_loss: 6.421367645263672\n",
      "Step 272: local_loss: 7.571834564208984\n",
      "Step 273: local_loss: 5.638577461242676\n",
      "Step 274: local_loss: 7.005830764770508\n",
      "Step 275: local_loss: 6.238964080810547\n",
      "Step 276: local_loss: 6.764681339263916\n",
      "Step 277: local_loss: 6.8782734870910645\n",
      "Step 278: local_loss: 4.522693157196045\n",
      "Step 279: local_loss: 4.174657821655273\n",
      "Step 280: local_loss: 4.52664041519165\n",
      "Step 281: local_loss: 6.861603260040283\n",
      "Step 282: local_loss: 5.900689125061035\n",
      "Step 283: local_loss: 7.223396301269531\n",
      "Step 284: local_loss: 6.483818531036377\n",
      "Step 285: local_loss: 6.08632230758667\n",
      "Step 286: local_loss: 6.845053672790527\n",
      "Step 287: local_loss: 6.322165012359619\n",
      "Step 288: local_loss: 7.429718017578125\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 289: local_loss: 6.671483039855957\n",
      "Step 290: local_loss: 6.299442768096924\n",
      "Step 291: local_loss: 6.984711170196533\n",
      "Step 292: local_loss: 6.8935089111328125\n",
      "Step 293: local_loss: 6.129671573638916\n",
      "Step 294: local_loss: 4.764011859893799\n",
      "Step 295: local_loss: 6.581520080566406\n",
      "Step 296: local_loss: 5.888411045074463\n",
      "Step 297: local_loss: 6.701618671417236\n",
      "Step 298: local_loss: 4.717495918273926\n",
      "Step 299: local_loss: 5.4744062423706055\n",
      "Center node 4189 finished at step 300 with loss 5.4744\n",
      "\n",
      "=== Pretraining Epoch 9 (Center Node: 3682) ===\n",
      "Step 0: local_loss: 8.037359237670898\n",
      "Step 1: local_loss: 6.616797924041748\n",
      "Step 2: local_loss: 8.037057876586914\n",
      "Step 3: local_loss: 7.503168106079102\n",
      "Step 4: local_loss: 9.440680503845215\n",
      "Step 5: local_loss: 10.508880615234375\n",
      "Step 6: local_loss: 11.215688705444336\n",
      "Step 7: local_loss: 7.2520833015441895\n",
      "Step 8: local_loss: 9.959465026855469\n",
      "Step 9: local_loss: 10.894954681396484\n",
      "Step 10: local_loss: 9.280314445495605\n",
      "Step 11: local_loss: 5.296666622161865\n",
      "Step 12: local_loss: 9.874454498291016\n",
      "Step 13: local_loss: 8.137066841125488\n",
      "Step 14: local_loss: 6.550178527832031\n",
      "Step 15: local_loss: 6.591969966888428\n",
      "Step 16: local_loss: 7.176504611968994\n",
      "Step 17: local_loss: 10.612858772277832\n",
      "Step 18: local_loss: 10.147256851196289\n",
      "Step 19: local_loss: 6.902614116668701\n",
      "Step 20: local_loss: 6.574339866638184\n",
      "Step 21: local_loss: 8.014467239379883\n",
      "Step 22: local_loss: 9.520127296447754\n",
      "Step 23: local_loss: 5.981522083282471\n",
      "Step 24: local_loss: 11.34304428100586\n",
      "Step 25: local_loss: 8.469393730163574\n",
      "Step 26: local_loss: 9.367116928100586\n",
      "Step 27: local_loss: 6.992095470428467\n",
      "Step 28: local_loss: 7.398940563201904\n",
      "Step 29: local_loss: 10.41910171508789\n",
      "Step 30: local_loss: 5.690165042877197\n",
      "Step 31: local_loss: 9.136495590209961\n",
      "Step 36: local_loss: 8.221882820129395\n",
      "Step 37: local_loss: 8.757806777954102\n",
      "Step 38: local_loss: 6.0945539474487305\n",
      "Step 39: local_loss: 9.131683349609375\n",
      "Step 40: local_loss: 7.98649787902832\n",
      "Step 41: local_loss: 7.638252258300781\n",
      "Step 42: local_loss: 6.968204975128174\n",
      "Step 43: local_loss: 9.05976390838623\n",
      "Step 44: local_loss: 4.892670154571533\n",
      "Step 45: local_loss: 8.258476257324219\n",
      "Step 46: local_loss: 5.42769193649292\n",
      "Step 47: local_loss: 5.976167678833008\n",
      "Step 48: local_loss: 8.600370407104492\n",
      "Step 49: local_loss: 7.181361198425293\n",
      "Step 50: local_loss: 8.116056442260742\n",
      "Step 51: local_loss: 6.819281578063965\n",
      "Step 52: local_loss: 9.448582649230957\n",
      "Step 53: local_loss: 6.043980598449707\n",
      "Step 54: local_loss: 6.869178771972656\n",
      "Step 55: local_loss: 4.820050239562988\n",
      "Step 56: local_loss: 6.503729820251465\n",
      "Step 57: local_loss: 10.933923721313477\n",
      "Step 58: local_loss: 7.515183925628662\n",
      "Step 59: local_loss: 8.766119956970215\n",
      "Step 60: local_loss: 5.642993450164795\n",
      "Step 61: local_loss: 6.050136566162109\n",
      "Step 62: local_loss: 5.663090229034424\n",
      "Step 63: local_loss: 7.944736003875732\n",
      "Step 64: local_loss: 6.082043647766113\n",
      "Step 65: local_loss: 8.215633392333984\n",
      "Step 66: local_loss: 8.858680725097656\n",
      "Step 67: local_loss: 6.69476318359375\n",
      "Step 68: local_loss: 8.943122863769531\n",
      "Step 69: local_loss: 6.090895175933838\n",
      "Step 70: local_loss: 7.095247745513916\n",
      "Step 71: local_loss: 5.701039791107178\n",
      "Step 72: local_loss: 7.7277913093566895\n",
      "Step 73: local_loss: 4.984421730041504\n",
      "Step 74: local_loss: 5.594653129577637\n",
      "Step 75: local_loss: 7.479974746704102\n",
      "Step 76: local_loss: 5.470666885375977\n",
      "Step 77: local_loss: 6.858246326446533\n",
      "Step 78: local_loss: 4.852211952209473\n",
      "Step 79: local_loss: 4.9016218185424805\n",
      "Step 80: local_loss: 5.188028812408447\n",
      "Step 81: local_loss: 7.633160591125488\n",
      "Step 82: local_loss: 5.600087642669678\n",
      "Step 83: local_loss: 7.5829176902771\n",
      "Step 84: local_loss: 4.679341793060303\n",
      "Step 85: local_loss: 6.011157512664795\n",
      "Step 86: local_loss: 6.372161388397217\n",
      "Step 87: local_loss: 5.300111293792725\n",
      "Step 88: local_loss: 6.75961446762085\n",
      "Step 89: local_loss: 5.526482582092285\n",
      "Step 90: local_loss: 7.591247081756592\n",
      "Step 91: local_loss: 5.504918098449707\n",
      "Step 92: local_loss: 6.026803016662598\n",
      "Step 93: local_loss: 6.847431659698486\n",
      "Step 94: local_loss: 4.534216403961182\n",
      "Step 95: local_loss: 6.012954235076904\n",
      "Step 96: local_loss: 4.970492839813232\n",
      "Step 97: local_loss: 4.389504909515381\n",
      "Step 98: local_loss: 6.096848011016846\n",
      "Step 99: local_loss: 7.340934753417969\n",
      "Step 100: local_loss: 4.856576919555664\n",
      "Step 101: local_loss: 6.324276447296143\n",
      "Step 102: local_loss: 5.467502117156982\n",
      "Step 103: local_loss: 6.087495803833008\n",
      "Step 104: local_loss: 7.61989688873291\n",
      "Step 105: local_loss: 7.176851749420166\n",
      "Step 106: local_loss: 5.065593242645264\n",
      "Step 107: local_loss: 5.310556411743164\n",
      "Step 108: local_loss: 5.33851432800293\n",
      "Step 109: local_loss: 3.982017755508423\n",
      "Step 110: local_loss: 6.913909435272217\n",
      "Step 111: local_loss: 6.461829662322998\n",
      "Step 112: local_loss: 5.915866374969482\n",
      "Step 113: local_loss: 6.278805255889893\n",
      "Step 114: local_loss: 6.849207401275635\n",
      "Step 115: local_loss: 5.595758438110352\n",
      "Step 116: local_loss: 7.167945384979248\n",
      "Step 117: local_loss: 5.663453578948975\n",
      "Step 118: local_loss: 3.932666063308716\n",
      "Step 119: local_loss: 5.268294334411621\n",
      "Step 120: local_loss: 4.888467788696289\n",
      "Step 121: local_loss: 3.6819586753845215\n",
      "Step 122: local_loss: 5.74593448638916\n",
      "Step 123: local_loss: 6.197916507720947\n",
      "Step 124: local_loss: 4.963679313659668\n",
      "Step 125: local_loss: 6.276733875274658\n",
      "Step 126: local_loss: 6.966554164886475\n",
      "Step 127: local_loss: 5.057409286499023\n",
      "Step 128: local_loss: 5.428274631500244\n",
      "Step 129: local_loss: 5.8409953117370605\n",
      "Step 130: local_loss: 3.8063626289367676\n",
      "Step 131: local_loss: 5.657082557678223\n",
      "Step 132: local_loss: 4.1657938957214355\n",
      "Step 133: local_loss: 4.524501800537109\n",
      "Step 134: local_loss: 4.333383083343506\n",
      "Step 135: local_loss: 4.096438407897949\n",
      "Step 136: local_loss: 5.562521457672119\n",
      "Step 137: local_loss: 4.545116901397705\n",
      "Step 138: local_loss: 4.453648567199707\n",
      "Step 139: local_loss: 4.954380035400391\n",
      "Step 140: local_loss: 4.246524810791016\n",
      "Step 141: local_loss: 5.328314781188965\n",
      "Step 142: local_loss: 3.9361279010772705\n",
      "Step 143: local_loss: 4.044310569763184\n",
      "Step 144: local_loss: 4.744545936584473\n",
      "Step 145: local_loss: 5.2755913734436035\n",
      "Step 146: local_loss: 5.480915069580078\n",
      "Step 147: local_loss: 4.239855766296387\n",
      "Step 148: local_loss: 4.354543209075928\n",
      "Step 149: local_loss: 3.884293556213379\n",
      "Step 150: local_loss: 6.78657341003418\n",
      "Step 151: local_loss: 4.833784103393555\n",
      "Step 152: local_loss: 4.153848648071289\n",
      "Step 153: local_loss: 4.420724868774414\n",
      "Step 154: local_loss: 6.5595622062683105\n",
      "Step 155: local_loss: 4.79063081741333\n",
      "Step 156: local_loss: 4.424715995788574\n",
      "Step 157: local_loss: 3.9124035835266113\n",
      "Step 158: local_loss: 4.148581504821777\n",
      "Step 159: local_loss: 4.48539400100708\n",
      "Step 160: local_loss: 4.425706386566162\n",
      "Step 161: local_loss: 4.4959716796875\n",
      "Step 162: local_loss: 3.489631414413452\n",
      "Step 163: local_loss: 5.010130882263184\n",
      "Step 164: local_loss: 4.65017032623291\n",
      "Step 165: local_loss: 4.99139928817749\n",
      "Step 166: local_loss: 4.5624542236328125\n",
      "Step 167: local_loss: 4.321290493011475\n",
      "Step 168: local_loss: 6.459095478057861\n",
      "Step 169: local_loss: 3.8108108043670654\n",
      "Step 170: local_loss: 4.515786170959473\n",
      "Step 171: local_loss: 4.901260852813721\n",
      "Step 172: local_loss: 5.998241901397705\n",
      "Step 173: local_loss: 4.492423057556152\n",
      "Step 174: local_loss: 5.6956892013549805\n",
      "Step 175: local_loss: 4.601504325866699\n",
      "Step 176: local_loss: 4.091935157775879\n",
      "Step 177: local_loss: 5.183026313781738\n",
      "Step 178: local_loss: 5.486415863037109\n",
      "Step 179: local_loss: 4.217460632324219\n",
      "Step 180: local_loss: 3.929673433303833\n",
      "Step 181: local_loss: 4.449202060699463\n",
      "Step 182: local_loss: 4.241168975830078\n",
      "Step 183: local_loss: 4.988788604736328\n",
      "Step 184: local_loss: 4.182931423187256\n",
      "Step 185: local_loss: 4.359382152557373\n",
      "Step 186: local_loss: 4.298892498016357\n",
      "Step 187: local_loss: 4.288263320922852\n",
      "Step 188: local_loss: 5.008509159088135\n",
      "Step 189: local_loss: 3.8103208541870117\n",
      "Step 190: local_loss: 3.3288652896881104\n",
      "Step 191: local_loss: 4.317531585693359\n",
      "Step 192: local_loss: 4.124873638153076\n",
      "Step 193: local_loss: 4.947734355926514\n",
      "Step 194: local_loss: 5.7803754806518555\n",
      "Step 195: local_loss: 3.7006895542144775\n",
      "Step 196: local_loss: 4.431723594665527\n",
      "Step 197: local_loss: 5.777367115020752\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Step 198: local_loss: 4.011373043060303\n",
      "Step 199: local_loss: 3.943800210952759\n",
      "Step 200: local_loss: 4.45395565032959\n",
      "Step 201: local_loss: 5.131066799163818\n",
      "Step 202: local_loss: 4.377695083618164\n",
      "Step 203: local_loss: 4.110675811767578\n",
      "Step 204: local_loss: 5.6210036277771\n",
      "Step 205: local_loss: 4.0629425048828125\n",
      "Step 206: local_loss: 4.171396732330322\n",
      "Step 207: local_loss: 6.340794563293457\n",
      "Step 208: local_loss: 4.299289703369141\n",
      "Step 209: local_loss: 4.4841766357421875\n",
      "Step 210: local_loss: 4.364166259765625\n",
      "Step 211: local_loss: 4.980685710906982\n",
      "Step 212: local_loss: 3.0908141136169434\n",
      "Step 213: local_loss: 5.795508861541748\n",
      "Step 214: local_loss: 3.5266506671905518\n",
      "Step 215: local_loss: 4.664776802062988\n",
      "Step 216: local_loss: 3.631559133529663\n",
      "Step 217: local_loss: 6.108334541320801\n",
      "Step 218: local_loss: 3.5026540756225586\n",
      "Step 219: local_loss: 4.184610843658447\n",
      "Step 220: local_loss: 3.1907293796539307\n",
      "Step 221: local_loss: 6.024537563323975\n",
      "Step 222: local_loss: 4.425282955169678\n",
      "Step 223: local_loss: 4.318334579467773\n",
      "Step 224: local_loss: 4.481563568115234\n",
      "Step 225: local_loss: 4.7716474533081055\n",
      "Step 226: local_loss: 3.756288528442383\n",
      "Step 227: local_loss: 5.40215539932251\n",
      "Step 228: local_loss: 4.376577377319336\n",
      "Step 229: local_loss: 4.820840835571289\n",
      "Step 230: local_loss: 6.411040782928467\n",
      "Step 231: local_loss: 4.962225437164307\n",
      "Step 232: local_loss: 5.06935977935791\n",
      "Step 233: local_loss: 3.6303982734680176\n",
      "Step 234: local_loss: 4.624288082122803\n",
      "Step 235: local_loss: 4.718266010284424\n",
      "Step 236: local_loss: 4.135792255401611\n",
      "Step 237: local_loss: 5.097017765045166\n",
      "Step 238: local_loss: 4.3229241371154785\n",
      "Step 239: local_loss: 4.376965522766113\n",
      "Step 240: local_loss: 4.499136447906494\n",
      "Step 241: local_loss: 6.60339879989624\n",
      "Step 242: local_loss: 3.8672940731048584\n",
      "Step 243: local_loss: 4.263368129730225\n",
      "Step 244: local_loss: 4.15752649307251\n",
      "Step 245: local_loss: 5.396935939788818\n",
      "Step 246: local_loss: 4.308600902557373\n",
      "Step 247: local_loss: 4.350072860717773\n",
      "Step 248: local_loss: 5.082338809967041\n",
      "Step 249: local_loss: 4.085370063781738\n",
      "Step 250: local_loss: 4.943338394165039\n",
      "Step 251: local_loss: 3.6687355041503906\n",
      "Step 252: local_loss: 3.587125778198242\n",
      "Step 253: local_loss: 3.8093526363372803\n",
      "Step 254: local_loss: 4.283278465270996\n",
      "Step 255: local_loss: 3.84041166305542\n",
      "Step 256: local_loss: 4.651256084442139\n",
      "Step 257: local_loss: 3.676846981048584\n",
      "Step 258: local_loss: 3.842895984649658\n",
      "Step 259: local_loss: 5.02404260635376\n",
      "Step 260: local_loss: 3.1171340942382812\n",
      "Step 261: local_loss: 3.7778162956237793\n",
      "Step 262: local_loss: 5.358842372894287\n",
      "Step 263: local_loss: 5.440846920013428\n",
      "Step 264: local_loss: 4.672074794769287\n",
      "Step 265: local_loss: 4.939542293548584\n",
      "Step 266: local_loss: 4.6366376876831055\n",
      "Step 267: local_loss: 4.353762626647949\n",
      "Step 268: local_loss: 5.614718914031982\n",
      "Step 269: local_loss: 4.640102386474609\n",
      "Step 270: local_loss: 5.25607442855835\n",
      "Step 271: local_loss: 3.8913159370422363\n",
      "Step 272: local_loss: 4.3087615966796875\n",
      "Step 273: local_loss: 4.0359601974487305\n",
      "Step 274: local_loss: 3.9521963596343994\n",
      "Step 275: local_loss: 4.85297966003418\n",
      "Step 276: local_loss: 6.124431133270264\n",
      "Step 277: local_loss: 4.6049604415893555\n",
      "Step 278: local_loss: 4.2288689613342285\n",
      "Step 279: local_loss: 4.012660026550293\n",
      "Step 280: local_loss: 4.05150032043457\n",
      "Step 281: local_loss: 4.78387975692749\n",
      "Step 282: local_loss: 3.813439130783081\n",
      "Step 283: local_loss: 5.177578449249268\n",
      "Step 284: local_loss: 4.334833145141602\n",
      "Step 285: local_loss: 3.0337109565734863\n",
      "Step 286: local_loss: 2.958427906036377\n",
      "Step 287: local_loss: 6.4314284324646\n",
      "Step 288: local_loss: 4.162344932556152\n",
      "Step 289: local_loss: 3.5590474605560303\n",
      "Step 290: local_loss: 4.379525184631348\n",
      "Step 291: local_loss: 3.5044195652008057\n",
      "Step 292: local_loss: 4.492239475250244\n",
      "Step 293: local_loss: 3.9298760890960693\n",
      "Step 294: local_loss: 4.073620796203613\n",
      "Step 295: local_loss: 3.275155782699585\n",
      "Step 296: local_loss: 3.654900312423706\n",
      "Step 297: local_loss: 4.100484371185303\n",
      "Step 298: local_loss: 4.1173996925354\n",
      "Step 299: local_loss: 4.688009262084961\n",
      "Center node 3682 finished at step 300 with loss 4.6880\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 0.1604 | Val AUC: 0.7261 | Val F1: 0.7099\n",
      "Epoch: 005 | Loss: 0.1480 | Val AUC: 0.8185 | Val F1: 0.5544\n",
      "Epoch: 010 | Loss: 0.1657 | Val AUC: 0.8168 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.0537 | Val AUC: 0.8143 | Val F1: 0.5660\n",
      "Epoch: 020 | Loss: 0.0007 | Val AUC: 0.8141 | Val F1: 0.5654\n",
      "Epoch: 025 | Loss: 0.0704 | Val AUC: 0.8222 | Val F1: 0.5962\n",
      "Epoch: 030 | Loss: 0.1593 | Val AUC: 0.8173 | Val F1: 0.5973\n",
      "Epoch: 035 | Loss: 0.0381 | Val AUC: 0.8260 | Val F1: 0.5941\n",
      "Epoch: 040 | Loss: 0.1008 | Val AUC: 0.8294 | Val F1: 0.6019\n",
      "Epoch: 045 | Loss: 0.0714 | Val AUC: 0.8280 | Val F1: 0.5861\n",
      "Epoch: 050 | Loss: 0.0313 | Val AUC: 0.8378 | Val F1: 0.5908\n",
      "Epoch: 055 | Loss: 0.0069 | Val AUC: 0.8335 | Val F1: 0.5868\n",
      "Epoch: 060 | Loss: 0.0031 | Val AUC: 0.8254 | Val F1: 0.6019\n",
      "Epoch: 065 | Loss: 0.0143 | Val AUC: 0.8228 | Val F1: 0.6208\n",
      "Epoch: 070 | Loss: 0.0197 | Val AUC: 0.8200 | Val F1: 0.6678\n",
      "Epoch: 075 | Loss: 0.0208 | Val AUC: 0.8222 | Val F1: 0.6678\n",
      "Epoch: 080 | Loss: 0.0473 | Val AUC: 0.8247 | Val F1: 0.6733\n",
      "Epoch: 085 | Loss: 0.0162 | Val AUC: 0.8251 | Val F1: 0.6773\n",
      "Epoch: 090 | Loss: 0.0064 | Val AUC: 0.8235 | Val F1: 0.6742\n",
      "Epoch: 095 | Loss: 0.0168 | Val AUC: 0.8201 | Val F1: 0.6733\n",
      "Epoch: 100 | Loss: 0.0052 | Val AUC: 0.8206 | Val F1: 0.6565\n",
      "Epoch: 105 | Loss: 0.0092 | Val AUC: 0.8251 | Val F1: 0.6583\n",
      "Epoch 00022: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 110 | Loss: 0.0067 | Val AUC: 0.8212 | Val F1: 0.6616\n",
      "Epoch: 115 | Loss: 0.0436 | Val AUC: 0.8211 | Val F1: 0.6616\n",
      "Epoch: 120 | Loss: 0.0101 | Val AUC: 0.8244 | Val F1: 0.6583\n",
      "Epoch: 125 | Loss: 0.0553 | Val AUC: 0.8212 | Val F1: 0.6649\n",
      "Epoch: 130 | Loss: 0.0022 | Val AUC: 0.8200 | Val F1: 0.6646\n",
      "Epoch: 135 | Loss: 0.0134 | Val AUC: 0.8181 | Val F1: 0.6678\n",
      "Epoch: 140 | Loss: 0.0510 | Val AUC: 0.8211 | Val F1: 0.6847\n",
      "Epoch: 145 | Loss: 0.1047 | Val AUC: 0.8177 | Val F1: 0.7103\n",
      "Epoch: 150 | Loss: 0.0112 | Val AUC: 0.8195 | Val F1: 0.7113\n",
      "Epoch: 155 | Loss: 0.0111 | Val AUC: 0.8205 | Val F1: 0.7131\n",
      "Epoch: 160 | Loss: 0.0059 | Val AUC: 0.8214 | Val F1: 0.7197\n",
      "Epoch 00033: reducing learning rate of group 0 to 1.2500e-04.\n",
      "Epoch: 165 | Loss: 0.0530 | Val AUC: 0.8247 | Val F1: 0.7169\n",
      "Epoch: 170 | Loss: 0.1436 | Val AUC: 0.8232 | Val F1: 0.7159\n",
      "Epoch: 175 | Loss: 0.0055 | Val AUC: 0.8274 | Val F1: 0.7139\n",
      "Epoch: 180 | Loss: 0.0868 | Val AUC: 0.8265 | Val F1: 0.7149\n",
      "Epoch: 185 | Loss: 0.0275 | Val AUC: 0.8275 | Val F1: 0.7167\n",
      "Epoch: 190 | Loss: 0.0066 | Val AUC: 0.8316 | Val F1: 0.7177\n",
      "Epoch: 195 | Loss: 0.0363 | Val AUC: 0.8310 | Val F1: 0.7167\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.8376 | Test AP: 0.4104 | Test F1: 0.5890 | G-mean: 0.3865\n"
     ]
    }
   ],
   "source": [
    "# 学习率调参\n",
    "\n",
    "\n",
    "# 参数设置\n",
    "args = {\n",
    "    \"dataset\": \"amazon\",\n",
    "#     \"dataset\": \"yelp\",\n",
    "    \"batch_size\": 8,\n",
    "    \"sample_size\": 50,\n",
    "    \"weight_decay\": 0.00005,\n",
    "    \"emb_size\": 32,\n",
    "    \"pretrain_epochs\": 50,\n",
    "    \"pretrain_lr\": 0.0003, #0.0005，\n",
    "    \"finetune_lr\": 0.0005,\n",
    "    \"num_epochs\": 200,\n",
    "    \"pretrain_patience\": 20,\n",
    "    \"patience\": 30,\n",
    "    \"tsne_weight\": 0.3,\n",
    "    \"weight\": 0.6,\n",
    "    \"layers\": 7,\n",
    "    \"test_size\": 0.6,\n",
    "    \"val_size\": 0.5,\n",
    "    \"layers_tree\": 7,\n",
    "    \"seed\": 76,\n",
    "    \"num_heads\": 4,\n",
    "    \"num_layers\": 2,\n",
    "    \"drop_rate\": 0.5\n",
    "}\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "print(device)\n",
    "\n",
    "timestamp = time.strftime(\"%Y%m%d-%H%M%S\")\n",
    "writer = SummaryWriter(f'runs/{args[\"dataset\"]}_{timestamp}')\n",
    "\n",
    "print('loading data...')\n",
    "prefix = \"../../data/\"\n",
    "\n",
    "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "np.random.seed(args['seed'])\n",
    "rd.seed(args['seed'])\n",
    "\n",
    "if args['dataset'] == 'yelp':\n",
    "    index = list(range(len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                    test_size=args['test_size'], random_state=2,\n",
    "                                                                    shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
    "elif args['dataset'] == 'amazon':\n",
    "    index = list(range(3305, len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                    stratify=labels[3305:],\n",
    "                                                                    test_size=args['test_size'],\n",
    "                                                                    random_state=2, shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "\n",
    "with open(dist_path, 'rb') as f:\n",
    "    dist_data = pickle.load(f)\n",
    "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "# 准备特征数据\n",
    "feat_data = torch.tensor(feat_data).float()\n",
    "# 特征归一化\n",
    "scaler = MinMaxScaler()\n",
    "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
    "\n",
    "# 初始化模型\n",
    "bsne_model = BSNE_Transformer(\n",
    "    in_feat=feat_data.shape[1],\n",
    "    out_feat=2,\n",
    "    relation_nums=len(edge_indexs),\n",
    "    d_model=64,\n",
    "    nhead=args['num_heads'],\n",
    "    num_layers=args['num_layers'],\n",
    "    dim_feedforward=256,\n",
    "    drop_rate=args['drop_rate']\n",
    ").to(device)\n",
    "\n",
    "# 将边索引转移到设备\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "all_local_losses = []      # 每个epoch的batch本地损失列表\n",
    "all_global_losses = []     # 每个epoch的batch全局损失列表\n",
    "all_bsne_losses = []       # 每个epoch的总BSNE损失\n",
    "epoch_avg_local = []       # 每个epoch的平均本地损失\n",
    "epoch_avg_global = []      # 每个epoch的平均全局损失\n",
    "    \n",
    "    \n",
    "print(\"\\n=== Starting Pretraining ===\")\n",
    "\n",
    "bsne_model.classifier.requires_grad_(False)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    filter(lambda p: p.requires_grad, bsne_model.parameters()),\n",
    "    lr=args['pretrain_lr'],\n",
    "    weight_decay=5e-5\n",
    ")\n",
    "pretrain_best_loss = float('inf')\n",
    "pretrain_no_improve = 0\n",
    "pretrain_early_stop = False\n",
    "\n",
    "temperature = 0.2  # 越小区分性越强\n",
    "loss_threshold = 0.5\n",
    "sample_size = 60\n",
    "max_steps_per_center = 300  # 防止死循环\n",
    "max_epochs = 10\n",
    "center_indices = list(range(feat_data.shape[0]))\n",
    "# 在每轮epoch前随机打乱中心点顺序\n",
    "rd.shuffle(center_indices)\n",
    "\n",
    "# 限制训练的中心点数量\n",
    "center_indices = center_indices[:max_epochs]\n",
    "\n",
    "\n",
    "for epoch, center_idx in enumerate(center_indices):\n",
    "    print(f\"\\n=== Pretraining Epoch {epoch} (Center Node: {center_idx}) ===\")\n",
    "    step = 0\n",
    "            \n",
    "    dist_row = dist_matrix[center_idx].cpu().numpy()\n",
    "    probs = np.exp(-dist_row / temperature)\n",
    "    \n",
    "    probs[center_idx] = 0\n",
    "    probs = probs / (probs.sum() + 1e-10) \n",
    "\n",
    "    available_nodes = len(dist_row) - 1\n",
    "    while True:\n",
    "        bsne_model.train()\n",
    "        optimizer.zero_grad()\n",
    "        \n",
    "        total_loss = 0.0  # 每个step都要重置\n",
    "        eps = 1e-10\n",
    "        \n",
    "        # 构建Bp子图\n",
    "        actual_sample_size = min(sample_size, available_nodes)\n",
    "        if actual_sample_size > 0:\n",
    "            neighbors = np.random.choice(len(dist_row), size=actual_sample_size, p=probs, replace=False)\n",
    "            bp_nodes = neighbors.tolist()\n",
    "        \n",
    "        # 计算Bp子图中所有节点的特征\n",
    "        bp_node_features = []\n",
    "        for node_idx in bp_nodes:\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "            bp_node_features.append(node_feature.squeeze(0))\n",
    "        bp_features = torch.stack(bp_node_features)\n",
    "        \n",
    "        center_node_subgraph = create_node_subgraph(center_idx, feat_data, edge_indexs, device)\n",
    "        _, center_feature = bsne_model([center_node_subgraph])\n",
    "        center_feature = center_feature.squeeze(0)\n",
    "        \n",
    "#         # 构建Bu子图\n",
    "#         if actual_sample_size > 0:\n",
    "#             neighbors = np.random.choice(len(dist_row), size=actual_sample_size, replace=False)\n",
    "#             bu_nodes = neighbors.tolist()\n",
    "            \n",
    "#         # 计算Bu子图中所有节点的特征\n",
    "#         bu_node_features = []\n",
    "#         for node_idx in bu_nodes:\n",
    "#             node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "#             _, node_feature = bsne_model([node_subgraph])\n",
    "#             bu_node_features.append(node_feature.squeeze(0))\n",
    "#         bu_features = torch.stack(bu_node_features)\n",
    "        \n",
    "        # 计算loss\n",
    "#         center_feature = bp_features[0]  # 中心节点是第一个\n",
    "\n",
    "        # 仅获取Bp子图中的其他节点（排除中心节点）\n",
    "#         other_bp_indices = bp_nodes\n",
    "#         other_bp_features = bp_features\n",
    "        \n",
    "#         P = probs[bp_nodes]\n",
    "        P = torch.tensor(probs[bp_nodes], device=device, dtype=torch.float32)\n",
    "\n",
    "        # 计算Q向量（欧式距离）\n",
    "        feat_dists_bp = torch.cdist(center_feature.unsqueeze(0), bp_features).squeeze(0)\n",
    "        Q = torch.softmax(-feat_dists_bp, dim=0)\n",
    "        \n",
    "#         log_ratio = torch.log(P + eps) - torch.log(Q + eps)\n",
    "        # 原局部loss计算公式\n",
    "#         log_ratio = torch.log(((P + eps)/(Q + eps)))\n",
    "#         loss_local = log_ratio.mean() \n",
    "\n",
    "        # 计算Q向量（点积）\n",
    "#         center_norm = F.normalize(center_feature, dim=0)\n",
    "#         other_norm = F.normalize(other_bp_features, dim=1)\n",
    "#         feat_dists_bp = center_norm @ other_norm.t()\n",
    "# #         Q = torch.softmax(feat_dists_bp.squeeze(0), dim=0)\n",
    "#         Q = feat_dists_bp/feat_dists_bp.sum()\n",
    "    \n",
    "#         M = 0.5 * (P + Q)\n",
    "#         loss_local = 0.5 * (P * torch.log(P/M) + Q * torch.log(Q/M)).sum()\n",
    "#         log_ratio = torch.abs(torch.log(P / (Q + eps)))\n",
    "        log_ratio = (torch.log(P/Q))**2\n",
    "        loss_local = log_ratio.mean() \n",
    "\n",
    "\n",
    "        # 获取全局距离并计算概率\n",
    "#         all_dists = dist_matrix[center_idx].cpu().numpy()\n",
    "# #         unnorm_probs_global = np.exp(-all_dists)\n",
    "#         unnorm_probs_global = np.power(10.0, -all_dists)\n",
    "#         unnorm_probs_global[center_idx] = 0  # 排除自身\n",
    "\n",
    "#         # 计算全局归一化常数\n",
    "#         Z_global = unnorm_probs_global.sum() + eps\n",
    "\n",
    "#         # 计算B_p节点的全局概率和\n",
    "#         bp_global_prob_sum = unnorm_probs_global[bp_nodes].sum() / Z_global\n",
    "\n",
    "#         # 计算k_Bp\n",
    "#         N = dist_matrix.shape[0]\n",
    "#         k_Bp = bp_global_prob_sum * (N / len(bp_nodes))\n",
    "\n",
    "#         bu_features_ = bu_features[1:]  # 排除中心节点\n",
    "#         feat_dists_bu = torch.cdist(center_feature.unsqueeze(0), bu_features).squeeze(0)\n",
    "#         sum_e_bu = torch.exp(-feat_dists_bu).sum()\n",
    "#         sum_e_bp = torch.exp(-feat_dists_bp).sum()\n",
    "        \n",
    "#         global_ratio = (sum_e_bu / (sum_e_bp + eps))\n",
    "        \n",
    "        \n",
    "#         loss_global = torch.abs(torch.log(global_ratio))\n",
    "#         #加上平方项\n",
    "#         loss_global = (torch.log(global_ratio.clamp(min=eps, max=1e10)))**2\n",
    "        \n",
    "#         total_loss += loss_local + loss_global\n",
    "#         total_loss.backward()\n",
    "        total_loss = loss_local\n",
    "        total_loss.backward()\n",
    "        optimizer.step()\n",
    "        \n",
    "        \n",
    "#         for name, parms in bsne_model.named_parameters():\n",
    "#             grad_value = parms.grad\n",
    "#             if grad_value is not None:\n",
    "#                 grad_mean = torch.mean(grad_value)\n",
    "#             else:\n",
    "#                 grad_mean = \"No gradient\"\n",
    "\n",
    "#             print(f'-->name: {name} '\n",
    "#                   f'--weight: {torch.mean(parms.data).item():.6f} '\n",
    "#                   f'-->grad_value: {grad_mean}')\n",
    "        \n",
    "#         print(\"P:\",P)\n",
    "#         print(\"Q:\",Q)\n",
    "#         print(f\"Step {step}: BSNE_Loss={total_loss.item():.4f},local_loss: {loss_local.item()},global_loss: {loss_global.item()}\")\n",
    "        print(f\"Step {step}: local_loss: {loss_local.item()}\")\n",
    "        step += 1\n",
    "        \n",
    "        if total_loss.item() < loss_threshold or step >= max_steps_per_center:\n",
    "            print(f\"Center node {center_idx} finished at step {step} with loss {total_loss.item():.4f}\")\n",
    "            break\n",
    "\n",
    "print(\"\\n=== Starting Fine-tuning ===\")\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=5e-5\n",
    ")\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "    total_loss = 0.0\n",
    "\n",
    "    # 采样中心节点\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = []\n",
    "\n",
    "    for xi in batch_centers:\n",
    "        subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "        #         print(len(subgraph['features']))\n",
    "        subgraph_data.append(subgraph)\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # 提取中心节点标签\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "\n",
    "    # 计算分类损失\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    total_loss += cls_loss.item()\n",
    "\n",
    "    avg_loss = total_loss / args['batch_size']\n",
    "    writer.add_scalar('FineTune/Train_Loss', avg_loss, epoch)\n",
    "\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        writer.add_scalar('Validation/AUC', val_auc, epoch)\n",
    "        writer.add_scalar('Validation/F1', val_f1, epoch)\n",
    "        writer.add_scalar('Validation/GMean', val_g_mean, epoch)\n",
    "\n",
    "        print(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            print(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# 加载最佳模型\n",
    "bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# 最终测试\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "print(f'\\n=== Final Test Results ===')\n",
    "print(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n",
    "writer.close()\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ac5746d9",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "541c560d",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "07714ddc",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "be821ebc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 0.1295 | Val AUC: 0.1935 | Val F1: 0.1842\n",
      "Epoch: 005 | Loss: 0.0202 | Val AUC: 0.7797 | Val F1: 0.4751\n",
      "Epoch: 010 | Loss: 0.0042 | Val AUC: 0.8488 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.0172 | Val AUC: 0.8509 | Val F1: 0.4751\n",
      "Epoch: 020 | Loss: 0.0062 | Val AUC: 0.8458 | Val F1: 0.4751\n",
      "Epoch: 025 | Loss: 0.0047 | Val AUC: 0.8428 | Val F1: 0.4751\n",
      "Epoch: 030 | Loss: 0.0087 | Val AUC: 0.8475 | Val F1: 0.4751\n",
      "Epoch: 035 | Loss: 0.0106 | Val AUC: 0.8578 | Val F1: 0.4751\n",
      "Epoch: 040 | Loss: 0.0083 | Val AUC: 0.8616 | Val F1: 0.4751\n",
      "Epoch: 045 | Loss: 0.0029 | Val AUC: 0.8661 | Val F1: 0.5018\n",
      "Epoch: 050 | Loss: 0.0173 | Val AUC: 0.8736 | Val F1: 0.7153\n",
      "Epoch: 055 | Loss: 0.0272 | Val AUC: 0.8817 | Val F1: 0.6775\n",
      "Epoch: 060 | Loss: 0.0357 | Val AUC: 0.8862 | Val F1: 0.5406\n",
      "Epoch: 065 | Loss: 0.0112 | Val AUC: 0.8912 | Val F1: 0.5224\n",
      "Epoch: 070 | Loss: 0.0414 | Val AUC: 0.8875 | Val F1: 0.6576\n",
      "Epoch: 075 | Loss: 0.0149 | Val AUC: 0.8890 | Val F1: 0.7307\n",
      "Epoch: 080 | Loss: 0.0578 | Val AUC: 0.8890 | Val F1: 0.7255\n",
      "Epoch: 085 | Loss: 0.0539 | Val AUC: 0.8912 | Val F1: 0.7007\n",
      "Epoch: 090 | Loss: 0.0036 | Val AUC: 0.8918 | Val F1: 0.7372\n",
      "Epoch: 095 | Loss: 0.0074 | Val AUC: 0.8922 | Val F1: 0.7716\n",
      "Epoch: 100 | Loss: 0.0365 | Val AUC: 0.8932 | Val F1: 0.7609\n",
      "Epoch: 105 | Loss: 0.0117 | Val AUC: 0.8915 | Val F1: 0.7632\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[31m---------------------------------------------------------------------------\u001b[39m",
      "\u001b[31mKeyboardInterrupt\u001b[39m                         Traceback (most recent call last)",
      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[9]\u001b[39m\u001b[32m, line 145\u001b[39m\n\u001b[32m    142\u001b[39m writer.add_scalar(\u001b[33m'\u001b[39m\u001b[33mFineTune/Train_Loss\u001b[39m\u001b[33m'\u001b[39m, avg_loss, epoch)\n\u001b[32m    144\u001b[39m \u001b[38;5;28;01mif\u001b[39;00m epoch % \u001b[32m5\u001b[39m == \u001b[32m0\u001b[39m:\n\u001b[32m--> \u001b[39m\u001b[32m145\u001b[39m     val_auc, val_ap, val_f1, val_g_mean = \u001b[43mtest\u001b[49m\u001b[43m(\u001b[49m\u001b[43midx_val\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43my_val\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbsne_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfeat_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43medge_indexs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m    147\u001b[39m     writer.add_scalar(\u001b[33m'\u001b[39m\u001b[33mValidation/AUC\u001b[39m\u001b[33m'\u001b[39m, val_auc, epoch)\n\u001b[32m    148\u001b[39m     writer.add_scalar(\u001b[33m'\u001b[39m\u001b[33mValidation/F1\u001b[39m\u001b[33m'\u001b[39m, val_f1, epoch)\n",
      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[1]\u001b[39m\u001b[32m, line 272\u001b[39m, in \u001b[36mtest\u001b[39m\u001b[34m(idx_eval, y_eval, model, feat_data, edge_indexs, device, batch_size)\u001b[39m\n\u001b[32m    270\u001b[39m \u001b[38;5;66;03m# 为每个中心节点构建子图\u001b[39;00m\n\u001b[32m    271\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m xi \u001b[38;5;129;01min\u001b[39;00m batch_centers:\n\u001b[32m--> \u001b[39m\u001b[32m272\u001b[39m     subgraph = \u001b[43mcreate_node_subgraph\u001b[49m\u001b[43m(\u001b[49m\u001b[43mxi\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mfeat_data\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43medge_indexs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdevice\u001b[49m\u001b[43m)\u001b[49m\n\u001b[32m    273\u001b[39m     subgraph_data.append(subgraph)\n\u001b[32m    275\u001b[39m \u001b[38;5;66;03m# 获取中心节点预测\u001b[39;00m\n",
      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[1]\u001b[39m\u001b[32m, line 69\u001b[39m, in \u001b[36mcreate_node_subgraph\u001b[39m\u001b[34m(node_idx, feat_data, edge_indexs, device)\u001b[39m\n\u001b[32m     67\u001b[39m sub_edge_index = []\n\u001b[32m     68\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m rel_idx \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;28mlen\u001b[39m(edge_indexs)):\n\u001b[32m---> \u001b[39m\u001b[32m69\u001b[39m     edge_index = \u001b[43medge_indexs\u001b[49m\u001b[43m[\u001b[49m\u001b[43mrel_idx\u001b[49m\u001b[43m]\u001b[49m\u001b[43m[\u001b[49m\u001b[32;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m.\u001b[49m\u001b[43mcpu\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m.numpy()\n\u001b[32m     70\u001b[39m     mask = np.isin(edge_index[\u001b[32m0\u001b[39m], sub_nodes) & np.isin(edge_index[\u001b[32m1\u001b[39m], sub_nodes)\n\u001b[32m     71\u001b[39m     local_edges = edge_index[:, mask]\n",
      "\u001b[31mKeyboardInterrupt\u001b[39m: "
     ]
    }
   ],
   "source": [
    "# Classification fine-tuning run (0708)\n",
    "\n",
    "# Hyperparameter settings\n",
    "args = {\n",
    "    \"dataset\": \"amazon\",\n",
    "#     \"dataset\": \"yelp\",\n",
    "    \"batch_size\": 8,\n",
    "    \"sample_size\": 50,\n",
    "    \"weight_decay\": 0.00005,\n",
    "    \"emb_size\": 32,\n",
    "    \"pretrain_epochs\": 50,\n",
    "    \"pretrain_lr\": 0.0005,\n",
    "    \"finetune_lr\": 0.0005,\n",
    "    \"num_epochs\": 200,\n",
    "    \"pretrain_patience\": 20,\n",
    "    \"patience\": 30,\n",
    "    \"tsne_weight\": 0.3,\n",
    "    \"weight\": 0.6,\n",
    "    \"layers\": 7,\n",
    "    \"test_size\": 0.6,\n",
    "    \"val_size\": 0.5,\n",
    "    \"layers_tree\": 7,\n",
    "    \"seed\": 76,\n",
    "    \"num_heads\": 4,\n",
    "    \"num_layers\": 2,\n",
    "    \"drop_rate\": 0.5\n",
    "}\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "print(device)\n",
    "\n",
    "timestamp = time.strftime(\"%Y%m%d-%H%M%S\")\n",
    "writer = SummaryWriter(f'runs/{args[\"dataset\"]}_{timestamp}')\n",
    "\n",
    "print('loading data...')\n",
    "prefix = \"../../data/\"\n",
    "\n",
    "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "# Seed every RNG in play; torch.manual_seed was previously missing, so\n",
    "# model initialization and dropout were not reproducible across runs.\n",
    "np.random.seed(args['seed'])\n",
    "rd.seed(args['seed'])\n",
    "torch.manual_seed(args['seed'])\n",
    "\n",
    "if args['dataset'] == 'yelp':\n",
    "    index = list(range(len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                    test_size=args['test_size'], random_state=2,\n",
    "                                                                    shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
    "elif args['dataset'] == 'amazon':\n",
    "    # The first 3305 Amazon nodes are excluded from the train/val/test splits.\n",
    "    index = list(range(3305, len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                    stratify=labels[3305:],\n",
    "                                                                    test_size=args['test_size'],\n",
    "                                                                    random_state=2, shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "else:\n",
    "    # Fail fast: an unknown dataset previously fell through and produced a\n",
    "    # confusing NameError on dist_path below.\n",
    "    raise ValueError(f\"Unsupported dataset: {args['dataset']}\")\n",
    "\n",
    "with open(dist_path, 'rb') as f:\n",
    "    dist_data = pickle.load(f)\n",
    "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "# Prepare node features (min-max scaled)\n",
    "feat_data = torch.tensor(feat_data).float()\n",
    "scaler = MinMaxScaler()\n",
    "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
    "\n",
    "# Initialize the model\n",
    "bsne_model = BSNE_Transformer(\n",
    "    in_feat=feat_data.shape[1],\n",
    "    out_feat=2,\n",
    "    relation_nums=len(edge_indexs),\n",
    "    d_model=64,\n",
    "    nhead=args['num_heads'],\n",
    "    num_layers=args['num_layers'],\n",
    "    dim_feedforward=256,\n",
    "    drop_rate=args['drop_rate']\n",
    ").to(device)\n",
    "\n",
    "# Move edge indices onto the training device\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "print(\"\\n=== Starting Fine-tuning ===\")\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=args['weight_decay']  # was a hard-coded 5e-5 duplicating this arg\n",
    ")\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "\n",
    "    # Sample one mini-batch of center nodes and build a subgraph for each\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = [create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "                     for xi in batch_centers]\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # Center-node labels and classification loss\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    # BUG FIX: nll_loss already returns the per-sample mean, so the old\n",
    "    # `total_loss / batch_size` under-reported the loss by a factor of B.\n",
    "    avg_loss = cls_loss.item()\n",
    "    writer.add_scalar('FineTune/Train_Loss', avg_loss, epoch)\n",
    "\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        writer.add_scalar('Validation/AUC', val_auc, epoch)\n",
    "        writer.add_scalar('Validation/F1', val_f1, epoch)\n",
    "        writer.add_scalar('Validation/GMean', val_g_mean, epoch)\n",
    "\n",
    "        print(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        # NOTE: patience is counted in evaluations (one every 5 epochs), so\n",
    "        # patience=30 means up to 150 epochs without improvement.\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            print(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# Restore the best checkpoint; guard against the run being interrupted\n",
    "# before the first evaluation (best_model_state still None).\n",
    "if best_model_state is not None:\n",
    "    bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# Final test\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "print(f'\\n=== Final Test Results ===')\n",
    "print(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n",
    "writer.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "965b185f",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "509fd972",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "15bfb7b4",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "bp_features: tensor([[ 1.5379e-01, -2.5115e-01, -1.4835e+00,  ..., -3.8957e+00,\n",
      "         -3.1473e-01,  1.0758e-01],\n",
      "        [ 5.9252e-01, -1.2292e-02, -3.0347e-01,  ..., -2.4255e+00,\n",
      "         -6.3382e-04, -2.3526e-01],\n",
      "        [-1.4551e-01,  4.6990e-02,  6.0272e-03,  ..., -2.2072e+00,\n",
      "         -4.1145e-01,  3.6693e-01],\n",
      "        ...,\n",
      "        [-4.5456e-01, -1.9187e-01, -6.8510e-02,  ..., -1.5710e+00,\n",
      "         -6.7994e-01,  8.8107e-01],\n",
      "        [-6.9214e-02, -9.3351e-01, -1.5648e-01,  ..., -3.2156e+00,\n",
      "          3.9368e-01,  1.2101e+00],\n",
      "        [ 4.1762e-02, -3.7311e-01, -1.0503e-01,  ...,  1.5461e-01,\n",
      "         -4.5418e-01,  4.0897e-02]], device='cuda:0', grad_fn=<StackBackward0>)\n",
      "bu_features: tensor([[-0.0184, -0.5900, -0.7437,  ..., -1.0337,  1.4888, -1.1678],\n",
      "        [ 0.3477,  0.5557, -0.7436,  ...,  0.1818, -0.5751, -0.3329],\n",
      "        [ 0.0310,  0.8537, -1.4270,  ..., -0.1282, -0.2549,  0.1970],\n",
      "        ...,\n",
      "        [ 1.7768, -0.5098,  0.2406,  ..., -0.1706, -0.4174,  0.7792],\n",
      "        [ 0.1729,  0.1236, -0.6441,  ..., -3.1630, -0.0760, -0.6291],\n",
      "        [-0.6186,  1.7482, -0.7534,  ..., -1.9393,  0.1020,  0.0648]],\n",
      "       device='cuda:0', grad_fn=<StackBackward0>)\n"
     ]
    },
    {
     "ename": "NameError",
     "evalue": "name 'orig_dists_bp' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[31m---------------------------------------------------------------------------\u001b[39m",
      "\u001b[31mNameError\u001b[39m                                 Traceback (most recent call last)",
      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[4]\u001b[39m\u001b[32m, line 3\u001b[39m\n\u001b[32m      1\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mbp_features:\u001b[39m\u001b[33m\"\u001b[39m,bp_features)\n\u001b[32m      2\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mbu_features:\u001b[39m\u001b[33m\"\u001b[39m,bu_features)\n\u001b[32m----> \u001b[39m\u001b[32m3\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33morig_dists_bp:\u001b[39m\u001b[33m\"\u001b[39m,\u001b[43morig_dists_bp\u001b[49m)\n\u001b[32m      4\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mP:\u001b[39m\u001b[33m\"\u001b[39m,P)\n\u001b[32m      5\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33mQ:\u001b[39m\u001b[33m\"\u001b[39m,Q)\n",
      "\u001b[31mNameError\u001b[39m: name 'orig_dists_bp' is not defined"
     ]
    }
   ],
   "source": [
    "# Debug dump of intermediate tensors from a BSNE forward pass.\n",
    "# NOTE(review): every name printed here is a global left over from a\n",
    "# previous cell's execution; on a fresh kernel this cell fails with a\n",
    "# NameError (see the recorded NameError for orig_dists_bp in the outputs).\n",
    "print(\"probs:\",probs)\n",
    "print(\"center_feature:\",center_feature)\n",
    "print(\"bp_features:\",bp_features)\n",
    "print(\"bu_features:\",bu_features)\n",
    "\n",
    "print(\"P:\",P)\n",
    "print(\"Q:\",Q)\n",
    "\n",
    "print(\"feat_dists_bp:\",feat_dists_bp)\n",
    "print(\"feat_dists_bu:\",feat_dists_bu)\n",
    "print(\"sum_e_bp:\",sum_e_bp)\n",
    "print(\"sum_e_bu:\",sum_e_bu)\n",
    "print(\"global_ratio:\",global_ratio)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2e87fc62",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "fb44953e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(1.0000)"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check on the leaked global `probs`: it sums to 1.0 in the\n",
    "# recorded run, i.e. it is a normalized probability distribution.\n",
    "probs.sum()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "069c1034",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "-->name: feature_proj.0.weight -->grad_requirs: True --weight: 0.008665 -->grad_value: No gradient\n",
      "-->name: feature_proj.0.bias -->grad_requirs: True --weight: 0.010582 -->grad_value: No gradient\n",
      "-->name: feature_proj.1.weight -->grad_requirs: True --weight: 0.994209 -->grad_value: No gradient\n",
      "-->name: feature_proj.1.bias -->grad_requirs: True --weight: -0.012147 -->grad_value: No gradient\n",
      "-->name: norm.weight -->grad_requirs: True --weight: 0.996044 -->grad_value: No gradient\n",
      "-->name: norm.bias -->grad_requirs: True --weight: -0.003284 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.self_attn.in_proj_weight -->grad_requirs: True --weight: 0.001154 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.self_attn.in_proj_bias -->grad_requirs: True --weight: 0.000770 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.self_attn.out_proj.weight -->grad_requirs: True --weight: 0.000251 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.self_attn.out_proj.bias -->grad_requirs: True --weight: 0.002110 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.linear1.weight -->grad_requirs: True --weight: 0.000633 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.linear1.bias -->grad_requirs: True --weight: -0.007763 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.linear2.weight -->grad_requirs: True --weight: 0.000221 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.linear2.bias -->grad_requirs: True --weight: 0.005467 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.norm1.weight -->grad_requirs: True --weight: 1.003334 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.norm1.bias -->grad_requirs: True --weight: -0.006549 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.norm2.weight -->grad_requirs: True --weight: 1.000152 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.0.norm2.bias -->grad_requirs: True --weight: -0.001157 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.self_attn.in_proj_weight -->grad_requirs: True --weight: -0.001489 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.self_attn.in_proj_bias -->grad_requirs: True --weight: -0.003456 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.self_attn.out_proj.weight -->grad_requirs: True --weight: -0.000381 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.self_attn.out_proj.bias -->grad_requirs: True --weight: -0.005391 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.linear1.weight -->grad_requirs: True --weight: -0.000333 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.linear1.bias -->grad_requirs: True --weight: -0.014522 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.linear2.weight -->grad_requirs: True --weight: 0.000884 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.linear2.bias -->grad_requirs: True --weight: 0.008839 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.norm1.weight -->grad_requirs: True --weight: 1.000222 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.norm1.bias -->grad_requirs: True --weight: -0.004946 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.norm2.weight -->grad_requirs: True --weight: 0.988574 -->grad_value: No gradient\n",
      "-->name: relation_encoders.0.layers.1.norm2.bias -->grad_requirs: True --weight: 0.001514 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.self_attn.in_proj_weight -->grad_requirs: True --weight: 0.001059 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.self_attn.in_proj_bias -->grad_requirs: True --weight: -0.002641 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.self_attn.out_proj.weight -->grad_requirs: True --weight: -0.001959 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.self_attn.out_proj.bias -->grad_requirs: True --weight: 0.004741 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.linear1.weight -->grad_requirs: True --weight: 0.000244 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.linear1.bias -->grad_requirs: True --weight: -0.014904 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.linear2.weight -->grad_requirs: True --weight: -0.003943 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.linear2.bias -->grad_requirs: True --weight: -0.000373 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.norm1.weight -->grad_requirs: True --weight: 1.002805 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.norm1.bias -->grad_requirs: True --weight: 0.000757 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.norm2.weight -->grad_requirs: True --weight: 1.001122 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.0.norm2.bias -->grad_requirs: True --weight: -0.005578 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.self_attn.in_proj_weight -->grad_requirs: True --weight: 0.001208 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.self_attn.in_proj_bias -->grad_requirs: True --weight: -0.000842 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.self_attn.out_proj.weight -->grad_requirs: True --weight: -0.002608 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.self_attn.out_proj.bias -->grad_requirs: True --weight: 0.000255 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.linear1.weight -->grad_requirs: True --weight: -0.000620 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.linear1.bias -->grad_requirs: True --weight: -0.009980 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.linear2.weight -->grad_requirs: True --weight: 0.000296 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.linear2.bias -->grad_requirs: True --weight: 0.000464 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.norm1.weight -->grad_requirs: True --weight: 0.999484 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.norm1.bias -->grad_requirs: True --weight: 0.012806 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.norm2.weight -->grad_requirs: True --weight: 1.025239 -->grad_value: No gradient\n",
      "-->name: relation_encoders.1.layers.1.norm2.bias -->grad_requirs: True --weight: -0.002184 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.self_attn.in_proj_weight -->grad_requirs: True --weight: 0.001369 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.self_attn.in_proj_bias -->grad_requirs: True --weight: -0.000463 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.self_attn.out_proj.weight -->grad_requirs: True --weight: 0.000816 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.self_attn.out_proj.bias -->grad_requirs: True --weight: 0.003603 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.linear1.weight -->grad_requirs: True --weight: 0.000447 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.linear1.bias -->grad_requirs: True --weight: -0.000787 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.linear2.weight -->grad_requirs: True --weight: -0.001296 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.linear2.bias -->grad_requirs: True --weight: 0.000606 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.norm1.weight -->grad_requirs: True --weight: 0.997347 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.norm1.bias -->grad_requirs: True --weight: -0.000370 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.norm2.weight -->grad_requirs: True --weight: 0.999627 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.0.norm2.bias -->grad_requirs: True --weight: 0.001037 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.self_attn.in_proj_weight -->grad_requirs: True --weight: -0.000457 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.self_attn.in_proj_bias -->grad_requirs: True --weight: -0.004389 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.self_attn.out_proj.weight -->grad_requirs: True --weight: 0.000166 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.self_attn.out_proj.bias -->grad_requirs: True --weight: -0.000464 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.linear1.weight -->grad_requirs: True --weight: 0.000110 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.linear1.bias -->grad_requirs: True --weight: -0.002366 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.linear2.weight -->grad_requirs: True --weight: 0.001228 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.linear2.bias -->grad_requirs: True --weight: 0.005763 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.norm1.weight -->grad_requirs: True --weight: 0.999454 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.norm1.bias -->grad_requirs: True --weight: -0.000763 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.norm2.weight -->grad_requirs: True --weight: 0.995095 -->grad_value: No gradient\n",
      "-->name: relation_encoders.2.layers.1.norm2.bias -->grad_requirs: True --weight: -0.002613 -->grad_value: No gradient\n",
      "-->name: classifier.0.weight -->grad_requirs: False --weight: -0.000072 -->grad_value: No gradient\n",
      "-->name: classifier.0.bias -->grad_requirs: False --weight: 0.001129 -->grad_value: No gradient\n",
      "-->name: classifier.3.weight -->grad_requirs: False --weight: -0.001398 -->grad_value: No gradient\n",
      "-->name: classifier.3.bias -->grad_requirs: False --weight: -0.008921 -->grad_value: No gradient\n"
     ]
    }
   ],
   "source": [
    "# Inspect each parameter of bsne_model: requires_grad flag, mean weight\n",
    "# value, and mean gradient ('No gradient' when .grad is None).\n",
    "# NOTE(review): in the recorded run every gradient is None -- presumably\n",
    "# this was executed before any backward() or after gradients were cleared;\n",
    "# confirm before drawing conclusions from the 'No gradient' entries.\n",
    "for name, parms in bsne_model.named_parameters():\n",
    "    grad_value = parms.grad\n",
    "    if grad_value is not None:\n",
    "        grad_mean = torch.mean(grad_value)\n",
    "    else:\n",
    "        grad_mean = \"No gradient\"\n",
    "\n",
    "    print(f'-->name: {name} '\n",
    "          f'-->grad_requirs: {parms.requires_grad} '\n",
    "          f'--weight: {torch.mean(parms.data).item():.6f} '\n",
    "          f'-->grad_value: {grad_mean}')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "adc2ddbd",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "bp_features: tensor([[-0.5702,  1.3168, -0.1787,  ...,  0.7632,  0.0429, -0.1725],\n",
      "        [ 0.1251, -1.1670,  0.3653,  ...,  1.0615, -0.5137,  0.9988],\n",
      "        [-1.0533, -0.3202,  0.4615,  ...,  1.0612, -0.6082,  0.9952],\n",
      "        ...,\n",
      "        [-0.3784, -0.1971,  0.5413,  ...,  0.5937, -0.9628, -0.3055],\n",
      "        [-0.8808, -0.1173,  0.2112,  ..., -1.0315, -1.3149,  0.8907],\n",
      "        [-2.3371,  0.6095, -0.0873,  ...,  0.0804, -0.5554,  0.6815]],\n",
      "       device='cuda:0', grad_fn=<StackBackward0>)\n",
      "bu_features: tensor([[-0.2823,  0.3090,  0.5724,  ...,  0.4419, -1.0853, -1.1917],\n",
      "        [-0.2784, -0.3072,  0.2529,  ...,  0.5045, -0.9009, -1.1452],\n",
      "        [-1.5317, -0.4242,  0.1296,  ..., -0.1155,  0.4415,  0.5392],\n",
      "        ...,\n",
      "        [ 0.7232, -2.5266,  0.5857,  ...,  0.1232, -0.4201,  0.9106],\n",
      "        [ 0.3383, -0.7859,  0.3099,  ...,  0.0577, -0.6736, -1.0603],\n",
      "        [ 0.0080,  0.0586,  0.5155,  ...,  0.1816, -0.3376, -0.1120]],\n",
      "       device='cuda:0', grad_fn=<StackBackward0>)\n",
      "orig_dists_bp: [2 1 1 1 2 1 1 1 2 1 2 2 1 1 2 2 1 1 1 1 2 1 1 2 1 1 2 2 2 2 1 1 2 2 2 1 1\n",
      " 1 1 2]\n",
      "P: tensor([0.0015, 0.0424, 0.0424, 0.0424, 0.0015, 0.0424, 0.0424, 0.0424, 0.0015,\n",
      "        0.0424, 0.0015, 0.0015, 0.0424, 0.0424, 0.0015, 0.0015, 0.0424, 0.0424,\n",
      "        0.0424, 0.0424, 0.0015, 0.0424, 0.0424, 0.0015, 0.0424, 0.0424, 0.0015,\n",
      "        0.0015, 0.0015, 0.0015, 0.0424, 0.0424, 0.0015, 0.0015, 0.0015, 0.0424,\n",
      "        0.0424, 0.0424, 0.0424, 0.0015], device='cuda:0', dtype=torch.float64)\n",
      "feat_dists_bp: tensor([18.2731, 16.3308, 15.3421, 16.1293, 17.3107, 15.8543, 15.5849, 16.0872,\n",
      "        17.1668, 15.5329, 17.4775, 13.7219, 16.3866, 15.4689, 17.1367, 16.1066,\n",
      "        15.8276, 17.2620, 14.2742, 15.3443, 15.9957, 15.4327, 16.2292, 15.6903,\n",
      "        16.0715, 16.0175, 17.6961, 15.9743, 17.8844, 15.9710, 16.4330, 16.2207,\n",
      "        18.3810, 18.1741, 19.0851, 15.8568, 15.9311, 17.3604, 15.2764, 17.7107],\n",
      "       device='cuda:0', grad_fn=<SqueezeBackward1>)\n",
      "Q: tensor([0.0062, 0.0208, 0.0385, 0.0236, 0.0113, 0.0280, 0.0331, 0.0242, 0.0124,\n",
      "        0.0342, 0.0102, 0.1054, 0.0201, 0.0356, 0.0126, 0.0239, 0.0285, 0.0117,\n",
      "        0.0748, 0.0384, 0.0256, 0.0364, 0.0222, 0.0310, 0.0245, 0.0253, 0.0089,\n",
      "        0.0260, 0.0079, 0.0260, 0.0195, 0.0223, 0.0058, 0.0066, 0.0038, 0.0280,\n",
      "        0.0267, 0.0110, 0.0401, 0.0088], device='cuda:0',\n",
      "       grad_fn=<SoftmaxBackward0>)\n",
      "unnorm_probs_global: [0.01 0.1  0.01 ... 0.01 0.01 0.01]\n",
      "Z_global: 235.74600000010003\n",
      "bp_global_prob_sum: 0.01047737819517172\n",
      "k_Bp: 3.128545129078276\n",
      "feat_dists_bu: tensor([16.4020, 16.3742, 18.9296, 17.1797, 16.4774, 16.7754, 17.3933, 17.5789,\n",
      "        17.6073, 16.7882, 16.0743, 15.5274, 15.9045, 18.0716, 15.4278, 14.1915,\n",
      "        15.3967, 14.9977, 17.3479, 18.7902, 16.9052, 14.5443, 16.8814, 16.4295,\n",
      "        15.1829, 17.4214, 18.7773, 16.6827, 17.3622, 17.5833, 18.2887, 17.0419,\n",
      "        15.3728, 17.9242, 15.3646, 18.1372, 16.5211, 16.9873, 17.0721, 19.0133],\n",
      "       device='cuda:0', grad_fn=<SqueezeBackward1>)\n",
      "sum_e_bp: tensor(24.7924, device='cuda:0', grad_fn=<SumBackward0>)\n",
      "sum_e_bu: tensor(23.9281, device='cuda:0', grad_fn=<SumBackward0>)\n",
      "global_ratio: tensor(3.0195, device='cuda:0', grad_fn=<MulBackward0>)\n"
     ]
    }
   ],
   "source": [
    "# Debug dump of t-SNE-style quantities (P, Q, distance vectors, global\n",
    "# partition sums) from the most recent forward pass.\n",
    "# NOTE(review): relies on globals leaked from an earlier cell's execution;\n",
    "# it will raise NameError under Restart & Run All.\n",
    "print(\"bp_features:\",bp_features)\n",
    "print(\"bu_features:\",bu_features)\n",
    "print(\"orig_dists_bp:\",orig_dists_bp)\n",
    "print(\"P:\",P)\n",
    "\n",
    "\n",
    "print(\"feat_dists_bp:\",feat_dists_bp)\n",
    "print(\"Q:\",Q)\n",
    "\n",
    "\n",
    "print(\"unnorm_probs_global:\",unnorm_probs_global)\n",
    "print(\"Z_global:\",Z_global)\n",
    "print(\"bp_global_prob_sum:\",bp_global_prob_sum)\n",
    "print(\"k_Bp:\",k_Bp)\n",
    "print(\"feat_dists_bu:\",feat_dists_bu)\n",
    "print(\"sum_e_bp:\",sum_e_bp)\n",
    "print(\"sum_e_bu:\",sum_e_bu)\n",
    "print(\"global_ratio:\",global_ratio)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "c5142326",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "bp_features: tensor([[ 0.0735,  0.1432,  0.2947,  ..., -0.0746,  0.5167, -0.1833],\n",
      "        [ 0.8301,  0.0871, -1.5590,  ...,  0.5665, -0.5540,  0.1626],\n",
      "        [ 1.5938, -0.7020, -0.6375,  ...,  0.5392, -2.2870,  0.0392],\n",
      "        ...,\n",
      "        [ 0.2287,  0.2691, -0.6960,  ...,  1.8262, -0.2489, -1.0601],\n",
      "        [ 1.4011,  0.6579,  0.0041,  ...,  0.4725, -0.4251, -0.2861],\n",
      "        [ 0.7985,  0.3066, -0.7456,  ...,  1.0053, -0.0059,  0.4827]],\n",
      "       device='cuda:0', grad_fn=<StackBackward0>)\n",
      "bu_features: tensor([[ 0.6321,  0.6420, -0.1064,  ...,  1.5194, -1.2372, -0.6961],\n",
      "        [-0.0558, -0.0347, -0.2537,  ..., -0.3866,  0.1960, -0.6085],\n",
      "        [ 1.3395,  0.4545, -1.1564,  ...,  0.7400, -0.5331, -2.3669],\n",
      "        ...,\n",
      "        [ 1.1103,  0.4445, -0.2255,  ..., -1.2766, -0.6034, -1.1156],\n",
      "        [ 0.8419,  0.0935,  0.0104,  ...,  0.6042, -0.5481, -0.7126],\n",
      "        [ 1.8648,  1.4712, -0.4018,  ...,  0.8256,  0.7816,  0.0853]],\n",
      "       device='cuda:0', grad_fn=<StackBackward0>)\n",
      "orig_dists_bp: [1 1 1 2 2 1 1 2 2 2 2 2 2 2 2 2 1 2 1 2 2 2 1 2 2 2 2 2 2 2 2 2 2 2 2 1 1\n",
      " 2 2 2]\n",
      "P: tensor([0.0903, 0.0903, 0.0903, 0.0032, 0.0032, 0.0903, 0.0903, 0.0032, 0.0032,\n",
      "        0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0903, 0.0032,\n",
      "        0.0903, 0.0032, 0.0032, 0.0032, 0.0903, 0.0032, 0.0032, 0.0032, 0.0032,\n",
      "        0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0032, 0.0903,\n",
      "        0.0903, 0.0032, 0.0032, 0.0032], device='cuda:0', dtype=torch.float64)\n",
      "feat_dists_bp: tensor([17.1739, 17.9419, 17.6980, 17.8211, 17.2373, 18.0952, 17.2932, 17.2454,\n",
      "        15.9230, 18.3066, 16.3189, 17.9622, 17.3002, 17.0915, 18.1144, 16.4267,\n",
      "        16.8798, 17.9886, 18.8088, 16.7149, 16.3378, 17.5355, 17.1079, 16.9240,\n",
      "        16.4198, 16.7808, 18.6371, 18.1660, 17.5035, 18.7379, 16.9212, 16.8213,\n",
      "        16.4792, 17.4383, 17.9245, 17.4383, 17.7713, 16.0490, 16.9128, 17.5431],\n",
      "       device='cuda:0', grad_fn=<SqueezeBackward1>)\n",
      "Q: tensor([0.0219, 0.0090, 0.0120, 0.0104, 0.0204, 0.0076, 0.0191, 0.0202, 0.0931,\n",
      "        0.0059, 0.0589, 0.0088, 0.0190, 0.0241, 0.0074, 0.0520, 0.0308, 0.0086,\n",
      "        0.0033, 0.0373, 0.0576, 0.0145, 0.0237, 0.0293, 0.0524, 0.0346, 0.0040,\n",
      "        0.0070, 0.0150, 0.0036, 0.0294, 0.0330, 0.0490, 0.0162, 0.0092, 0.0162,\n",
      "        0.0110, 0.0805, 0.0297, 0.0143], device='cuda:0',\n",
      "       grad_fn=<SoftmaxBackward0>)\n",
      "unnorm_probs_global: [0.13533528 0.13533528 0.13533528 ... 0.13533528 0.13533528 0.13533528]\n",
      "Z_global: 1635.3245811136128\n",
      "bp_global_prob_sum: 0.004732303909687978\n",
      "k_Bp: 1.4130659474328304\n",
      "feat_dists_bu: tensor([18.5493, 18.2278, 17.8553, 18.3226, 17.2628, 17.0861, 17.7634, 18.5652,\n",
      "        16.0455, 16.7946, 16.8388, 17.5043, 17.0578, 17.6884, 17.6687, 17.8079,\n",
      "        18.3602, 17.6491, 16.6072, 18.1221, 18.5421, 17.4638, 17.9239, 18.8807,\n",
      "        17.6198, 18.5791, 16.7308, 16.2829, 16.9668, 17.1444, 17.2959, 16.9767,\n",
      "        17.2846, 17.1383, 17.6546, 18.2743, 16.7264, 18.1228, 18.0507, 16.7941],\n",
      "       device='cuda:0', grad_fn=<SqueezeBackward1>)\n",
      "sum_e_bp: tensor(25.2063, device='cuda:0', grad_fn=<SumBackward0>)\n",
      "sum_e_bu: tensor(24.1805, device='cuda:0', grad_fn=<SumBackward0>)\n",
      "global_ratio: tensor(1.3556, device='cuda:0', grad_fn=<MulBackward0>)\n"
     ]
    }
   ],
   "source": [
    "# Dump the same intermediate tensors/scalars again (duplicate diagnostic cell from a later run).\n",
    "# Loop over (name, value) pairs instead of repeated print statements; stdout is identical.\n",
    "for _name, _val in [\n",
    "    (\"bp_features\", bp_features),\n",
    "    (\"bu_features\", bu_features),\n",
    "    (\"orig_dists_bp\", orig_dists_bp),\n",
    "    (\"P\", P),\n",
    "    (\"feat_dists_bp\", feat_dists_bp),\n",
    "    (\"Q\", Q),\n",
    "    (\"unnorm_probs_global\", unnorm_probs_global),\n",
    "    (\"Z_global\", Z_global),\n",
    "    (\"bp_global_prob_sum\", bp_global_prob_sum),\n",
    "    (\"k_Bp\", k_Bp),\n",
    "    (\"feat_dists_bu\", feat_dists_bu),\n",
    "    (\"sum_e_bp\", sum_e_bp),\n",
    "    (\"sum_e_bu\", sum_e_bu),\n",
    "    (\"global_ratio\", global_ratio),\n",
    "]:\n",
    "    print(f\"{_name}:\", _val)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 79,
   "id": "600b6423",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(25.3457, device='cuda:0', grad_fn=<SumBackward0>)"
      ]
     },
     "execution_count": 79,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sum_e_bu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 80,
   "id": "bb3ae0bc",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(26.3437, device='cuda:0', grad_fn=<SumBackward0>)"
      ]
     },
     "execution_count": 80,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sum_e_bp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "id": "cf4abad0",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(0.2500, device='cuda:0', grad_fn=<PowBackward0>)"
      ]
     },
     "execution_count": 56,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "loss_global"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "id": "688ce07c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([15.3787, 16.0121, 15.4253, 15.6754, 16.1709, 15.9302, 16.6181, 15.6466,\n",
       "        16.4141, 16.9830, 14.4971, 14.9762, 15.7784, 16.2958, 16.1495, 14.7736,\n",
       "        15.1410, 15.6543, 16.2820, 16.7194, 16.9441, 16.7581, 16.3961, 15.5213,\n",
       "        15.6640, 15.7070, 16.2821, 17.3215, 15.1667, 16.1609, 16.2895, 15.4043,\n",
       "        15.4740, 14.9284, 15.5751, 14.2662, 15.6083, 17.0193, 15.5923, 15.1397],\n",
       "       device='cuda:0', grad_fn=<SqueezeBackward1>)"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "feat_dists_bp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "id": "69f718fb",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor(1.0011e-06, device='cuda:0', grad_fn=<SumBackward0>)"
      ]
     },
     "execution_count": 72,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sum_e_bu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "id": "63fefd71",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([17.9786, 17.8149, 17.2145, 18.3167, 16.4932, 17.2646, 18.8500, 17.8485,\n",
       "        16.4257, 17.3208, 17.3076, 17.1714, 17.3531, 17.5322, 17.8556, 18.1759,\n",
       "        17.2787, 18.6417, 18.6537, 18.1426, 17.9105, 19.2785, 16.3744, 16.7953,\n",
       "        18.3104, 18.5197, 18.4340, 18.1621, 17.8350, 17.6045, 17.5249, 17.0740,\n",
       "        17.7094, 17.5130, 18.7455, 18.2615, 16.3142, 17.2683, 17.7009, 18.7330],\n",
       "       device='cuda:0', grad_fn=<SqueezeBackward1>)"
      ]
     },
     "execution_count": 71,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "feat_dists_bp"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 49,
   "id": "ed949b47",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([0.1971, 0.2812, 0.2541, 0.1176, 0.1694, 0.1517, 0.1961, 0.1920, 0.2319,\n",
       "        0.2380, 0.0799, 0.1419, 0.1515, 0.1990, 0.2612, 0.1326, 0.2011, 0.1924,\n",
       "        0.2845, 0.2461, 0.2412, 0.1704, 0.2357, 0.1555, 0.0924, 0.1515, 0.2337,\n",
       "        0.2247, 0.1362, 0.1792, 0.3258, 0.3037, 0.2473, 0.2268, 0.1531, 0.1798,\n",
       "        0.2265, 0.3413, 0.1420, 0.1489], device='cuda:0',\n",
       "       grad_fn=<SqueezeBackward4>)"
      ]
     },
     "execution_count": 49,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "feat_dists_bu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 50,
   "id": "ea8f2ada",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([17.3830, 18.8074, 17.2807, 16.2711, 16.5384, 17.5892, 17.5048, 18.6080,\n",
       "        17.4325, 17.3218, 15.9989, 17.1385, 16.9832, 17.4741, 17.7036, 16.7957,\n",
       "        16.9648, 16.8700, 18.7639, 16.2010, 15.8607, 17.6674, 17.1501, 17.4688,\n",
       "        17.7472, 18.1129, 17.2142, 16.6794, 17.3207, 17.3963, 17.6601, 16.1668,\n",
       "        17.2354, 17.4118, 17.9075, 17.9652, 16.5519, 17.9724, 17.1188, 17.2855],\n",
       "       device='cuda:0', grad_fn=<SqueezeBackward1>)"
      ]
     },
     "execution_count": 50,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "feat_dists_bp"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tsne01",
   "language": "python",
   "name": "tsne01"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
