{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "6d9d0ae9",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/data/home/sczc619/run/LML/anaconda3/envs/tsne/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import pickle\n",
    "import os\n",
    "import random as rd\n",
    "import numpy as np\n",
    "import copy\n",
    "import copy as cp\n",
    "import dgl\n",
    "from collections import defaultdict\n",
    "import matplotlib.pyplot as plt\n",
    "import time\n",
    "import scipy.sparse as sp\n",
    "from scipy.io import loadmat\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import torch.nn as nn\n",
    "from torch.nn import TransformerEncoder, TransformerEncoderLayer\n",
    "from torch_geometric.utils import to_dense_adj, subgraph\n",
    "from torch.utils.tensorboard import SummaryWriter\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.manifold import TSNE\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import f1_score, accuracy_score, recall_score, roc_auc_score, average_precision_score, \\\n",
    "    confusion_matrix\n",
    "\n",
    "# Dataset key -> pickled adjacency-list file name for each relation graph.\n",
    "filelist = {\n",
    "    'amz_upu': 'amz_upu_adjlists.pickle',\n",
    "    'amz_usu': 'amz_usu_adjlists.pickle',\n",
    "    'amz_uvu': 'amz_uvu_adjlists.pickle',\n",
    "    'yelp_rsr': 'yelp_rsr_adjlists.pickle',\n",
    "    'yelp_rtr': 'yelp_rtr_adjlists.pickle',\n",
    "    'yelp_rur': 'yelp_rur_adjlists.pickle'\n",
    "}\n",
    "\n",
    "# Dataset key -> file-name prefix of the pre-decomposed high-order\n",
    "# relation matrices, loaded as '<prefix><i>.pkl' in load_data.\n",
    "file_matrix_prefix = {\n",
    "    'amz_upu': 'amazon_upu_matrix_',\n",
    "    'amz_usu': 'amazon_usu_matrix_',\n",
    "    'amz_uvu': 'amazon_uvu_matrix_',\n",
    "    'yelp_rsr': 'yelpnet_rsr_matrix_decompision_',\n",
    "    'yelp_rtr': 'yelpnet_rtr_matrix_decompision_',\n",
    "    'yelp_rur': 'yelpnet_rur_matrix_decompision_'\n",
    "}\n",
    "\n",
    "\n",
    "def create_node_subgraph(node_idx, feat_data, edge_indexs, device):\n",
    "    \"\"\"\n",
    "    为单个节点创建一阶邻居子图（所有邻居）\n",
    "    \"\"\"\n",
    "    neighbors = set()\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        # 找出以中心点为起点的边的终点\n",
    "        rel_neighbors = edge_index[1][edge_index[0] == node_idx].tolist()\n",
    "        neighbors.update(rel_neighbors)\n",
    "\n",
    "    # 移除中心节点自身\n",
    "    neighbors.discard(node_idx)\n",
    "    neighbors = list(neighbors)\n",
    "\n",
    "    # 如果邻居太多，进行随机采样截取\n",
    "    sample_size = 399\n",
    "    if len(neighbors) > sample_size:\n",
    "        neighbors = np.random.choice(neighbors, size=sample_size, replace=False).tolist()\n",
    "\n",
    "    # 构建子图节点列表，确保中心节点是第一个\n",
    "    sub_nodes = [node_idx] + [n for n in neighbors if n != node_idx]\n",
    "\n",
    "    # 构建子图边列表\n",
    "    sub_edge_index = []\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        mask = np.isin(edge_index[0], sub_nodes) & np.isin(edge_index[1], sub_nodes)\n",
    "        local_edges = edge_index[:, mask]\n",
    "\n",
    "        # 创建节点映射\n",
    "        node_map = {n: i for i, n in enumerate(sub_nodes)}\n",
    "\n",
    "        # 将全局索引映射到局部索引\n",
    "        if len(local_edges) > 0 and local_edges.size > 0:\n",
    "            src_nodes = [node_map[src] for src in local_edges[0]]\n",
    "            dst_nodes = [node_map[dst] for dst in local_edges[1]]\n",
    "            edge_tensor = torch.tensor([src_nodes, dst_nodes], dtype=torch.long)\n",
    "        else:\n",
    "            # 添加自环确保图不为空\n",
    "            edge_tensor = torch.tensor([[0], [0]], dtype=torch.long)\n",
    "\n",
    "        sub_edge_index.append(edge_tensor.to(device))\n",
    "\n",
    "    # 创建子图数据\n",
    "    subgraph = {\n",
    "        'features': feat_data[sub_nodes].clone(),\n",
    "        'edges': sub_edge_index,\n",
    "        'global_idx': sub_nodes\n",
    "    }\n",
    "\n",
    "    return subgraph\n",
    "\n",
    "\n",
    "def dict_to_edge_index(edge_dict):\n",
    "    source_nodes = []\n",
    "    target_nodes = []\n",
    "    for src, targets in edge_dict.items():\n",
    "        for target in targets:\n",
    "            source_nodes.append(src)\n",
    "            target_nodes.append(target)\n",
    "    edge_index = [source_nodes, target_nodes]\n",
    "    return torch.LongTensor(edge_index)\n",
    "\n",
    "\n",
    "def numpy_array_to_edge_index(np_array):\n",
    "    assert np_array.ndim == 2 and np_array.shape[0] == np_array.shape[1], \"Input must be a square matrix.\"\n",
    "    rows, cols = np.nonzero(np_array)\n",
    "    edge_index = np.vstack((rows, cols))\n",
    "    edge_index_tensor = torch.from_numpy(edge_index).long()\n",
    "    return edge_index_tensor\n",
    "\n",
    "\n",
    "def _load_adjlist(path):\n",
    "    \"\"\"Load a pickled adjacency-list dict and convert it to an edge-index tensor.\"\"\"\n",
    "    with open(path, 'rb') as file:\n",
    "        relation = pickle.load(file)\n",
    "    return dict_to_edge_index(relation)\n",
    "\n",
    "\n",
    "def _load_trees(prefix_path, k):\n",
    "    \"\"\"Load the k decomposed high-order matrices '<prefix>1.pkl' .. '<prefix>k.pkl'.\"\"\"\n",
    "    trees = []\n",
    "    for i in range(1, k + 1):\n",
    "        file_name = '{}{}.pkl'.format(prefix_path, i)\n",
    "        with open(file_name, 'rb') as file:\n",
    "            tree = pickle.load(file)\n",
    "        trees.append(numpy_array_to_edge_index(tree))\n",
    "    return trees\n",
    "\n",
    "\n",
    "def load_data(data, k=2, prefix=''):\n",
    "    \"\"\"\n",
    "    Load a fraud-detection dataset and its relation graphs.\n",
    "\n",
    "    :param data: 'yelp' or 'amazon' (anything else returns None, as before)\n",
    "    :param k: number of decomposed high-order matrices per relation\n",
    "    :param prefix: directory containing the .mat / pickle files\n",
    "    :return: ([[edge_index, [tree_edge_indexes]] per relation], feat_data, labels)\n",
    "\n",
    "    The original per-relation load code was copy-pasted six times; it is\n",
    "    now factored into _load_adjlist / _load_trees with identical behavior\n",
    "    (same files, same order, same return structure).\n",
    "    \"\"\"\n",
    "    pickle_file = {}\n",
    "    matrix_prefix = {}\n",
    "    for key in filelist:\n",
    "        pickle_file[key] = os.path.join(prefix, filelist[key])\n",
    "        matrix_prefix[key] = os.path.join(prefix, file_matrix_prefix[key])\n",
    "\n",
    "    if data == 'yelp':\n",
    "        mat_name = 'YelpChi.mat'\n",
    "        rel_keys = ['yelp_rur', 'yelp_rtr', 'yelp_rsr']\n",
    "    elif data == 'amazon':\n",
    "        mat_name = 'Amazon.mat'\n",
    "        rel_keys = ['amz_upu', 'amz_usu', 'amz_uvu']\n",
    "    else:\n",
    "        # Unknown dataset: the original fell through and returned None.\n",
    "        return None\n",
    "\n",
    "    data_file = loadmat(os.path.join(prefix, mat_name))\n",
    "    labels = data_file['label'].flatten()\n",
    "    feat_data = data_file['features'].todense().A\n",
    "\n",
    "    relations = [[_load_adjlist(pickle_file[key]), _load_trees(matrix_prefix[key], k)]\n",
    "                 for key in rel_keys]\n",
    "    return relations, feat_data, labels\n",
    "\n",
    "\n",
    "def Visualization(labels, embedding, prefix):\n",
    "    \"\"\"\n",
    "    t-SNE scatter plot of a class-balanced sample of node embeddings,\n",
    "    saved to '<prefix>/HOGRL.png' and shown inline.\n",
    "\n",
    "    :param labels: per-node class labels (0/1), indexable by a numpy array\n",
    "    :param embedding: embedding matrix whose rows align with labels\n",
    "    :param prefix: directory the figure is written into\n",
    "    \"\"\"\n",
    "    # Balance classes: all positives plus an equal number of negatives.\n",
    "    train_pos, train_neg = pos_neg_split(list(range(len(labels))), labels)\n",
    "    sampled_idx_train = undersample(train_pos, train_neg, scale=1)\n",
    "    tsne = TSNE(n_components=2, random_state=43)\n",
    "    sampled_idx_train = np.array(sampled_idx_train)\n",
    "    # NOTE(review): replace=True draws 5000 indices WITH replacement, so\n",
    "    # points are duplicated when fewer than 5000 balanced indices exist --\n",
    "    # confirm this is intended.\n",
    "    sampled_idx_train = np.random.choice(sampled_idx_train, size=5000, replace=True)\n",
    "    ps = embedding[sampled_idx_train]\n",
    "    ls = labels[sampled_idx_train]\n",
    "\n",
    "    X_reduced = tsne.fit_transform(ps)\n",
    "\n",
    "    # Rescale the t-SNE output into the unit square for plotting.\n",
    "    scaler = MinMaxScaler(feature_range=(0, 1))\n",
    "    X_scaled = scaler.fit_transform(X_reduced)\n",
    "    print(X_scaled.shape)\n",
    "\n",
    "    plt.figure(figsize=(8, 8))\n",
    "\n",
    "    plt.scatter(X_scaled[ls == 0, 0], X_scaled[ls == 0, 1], c='#14517C', label='Label 0', s=3)\n",
    "\n",
    "    plt.scatter(X_scaled[ls == 1, 0], X_scaled[ls == 1, 1], c='#FA7F6F', label='Label 1', s=3)\n",
    "\n",
    "    # Hide all spines and ticks so only the point cloud is visible.\n",
    "    ax = plt.gca()\n",
    "    ax.spines['top'].set_visible(False)\n",
    "    ax.spines['right'].set_visible(False)\n",
    "    ax.spines['left'].set_visible(False)\n",
    "    ax.spines['bottom'].set_visible(False)\n",
    "\n",
    "    plt.xticks([])\n",
    "    plt.yticks([])\n",
    "\n",
    "    plt.xlim(0, 1)\n",
    "    plt.ylim(0, 1)\n",
    "    filepath = os.path.join(prefix, 'HOGRL.png')\n",
    "    plt.savefig(filepath)\n",
    "    plt.show()\n",
    "\n",
    "\n",
    "def normalize(mx):\n",
    "    rowsum = np.array(mx.sum(1)) + 0.01\n",
    "    r_inv = np.power(rowsum, -1).flatten()\n",
    "    r_inv[np.isinf(r_inv)] = 0.\n",
    "    r_mat_inv = sp.diags(r_inv)\n",
    "    mx = r_mat_inv.dot(mx)\n",
    "    return mx\n",
    "\n",
    "\n",
    "def pos_neg_split(nodes, labels):\n",
    "    # 正负样本分割\n",
    "    pos_nodes = []\n",
    "    neg_nodes = cp.deepcopy(nodes)\n",
    "    aux_nodes = cp.deepcopy(nodes)\n",
    "    for idx, label in enumerate(labels):\n",
    "        if label == 1:\n",
    "            pos_nodes.append(aux_nodes[idx])\n",
    "            neg_nodes.remove(aux_nodes[idx])\n",
    "\n",
    "    return pos_nodes, neg_nodes\n",
    "\n",
    "\n",
    "def undersample(pos_nodes, neg_nodes, scale=1):\n",
    "    # 对负样本进行下采样，平衡正负样本数量\n",
    "    aux_nodes = cp.deepcopy(neg_nodes)\n",
    "    aux_nodes = rd.sample(aux_nodes, k=int(len(pos_nodes) * scale))\n",
    "    batch_nodes = pos_nodes + aux_nodes\n",
    "\n",
    "    return batch_nodes\n",
    "\n",
    "\n",
    "def calculate_g_mean(y_true, y_pred):\n",
    "    \"\"\"Geometric mean of per-class recall, computed from the confusion matrix.\"\"\"\n",
    "    cm = confusion_matrix(y_true, y_pred)\n",
    "    recalls = []\n",
    "    for class_idx, row in enumerate(cm):\n",
    "        true_positive = row[class_idx]\n",
    "        class_total = row.sum()\n",
    "        # Recall = TP / (TP + FN); the row sum of the confusion matrix is TP + FN.\n",
    "        recalls.append(true_positive / class_total if class_total != 0 else 0)\n",
    "    return np.prod(recalls) ** (1 / len(recalls))\n",
    "\n",
    "\n",
    "def iterate_batches(indices, batch_size, shuffle=True):\n",
    "    \"\"\"\n",
    "        将索引列表划分为指定大小的批次\n",
    "        :param indices: 样本索引列表\n",
    "        :param batch_size: 每个批次的大小\n",
    "        :param shuffle: 是否打乱顺序\n",
    "        :return: 生成批次索引的迭代器\n",
    "    \"\"\"\n",
    "    if shuffle:\n",
    "        rd.shuffle(indices)\n",
    "    for i in range(0, len(indices), batch_size):\n",
    "        yield indices[i:i + batch_size]\n",
    "\n",
    "\n",
    "def test(idx_eval, y_eval, model, feat_data, edge_indexs, device, batch_size=64):\n",
    "    \"\"\"\n",
    "    Evaluate `model` on the nodes in `idx_eval` and return\n",
    "    (auc, average_precision, macro_f1, g_mean).\n",
    "\n",
    "    NOTE(review): `y_eval` is never used -- ground-truth labels are read\n",
    "    from the module-level `labels` variable instead; confirm intended.\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    all_probs = []\n",
    "    all_labels = []\n",
    "\n",
    "    # Process the evaluation nodes in batches.\n",
    "    for batch_centers in iterate_batches(idx_eval, batch_size, shuffle=False):\n",
    "        subgraph_data = []\n",
    "\n",
    "        # Build a 1-hop subgraph for every center node in the batch.\n",
    "        for xi in batch_centers:\n",
    "            subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "            subgraph_data.append(subgraph)\n",
    "\n",
    "        # Predict the batch of center nodes.\n",
    "        with torch.no_grad():\n",
    "            center_logits, _ = model(subgraph_data)  # [B, 2]\n",
    "            # The model outputs log-probabilities (log_softmax); applying\n",
    "            # softmax to them recovers the probabilities exactly.\n",
    "            probs = torch.softmax(center_logits, dim=-1)[:, 1]  # positive-class probability\n",
    "            all_probs.extend(probs.cpu().numpy())\n",
    "            all_labels.extend([labels[xi] for xi in batch_centers])\n",
    "\n",
    "    # Compute evaluation metrics.\n",
    "    auc_score = roc_auc_score(all_labels, all_probs)\n",
    "    ap_score = average_precision_score(all_labels, all_probs)\n",
    "    pred_labels = (np.array(all_probs) >= 0.5).astype(int)\n",
    "    f1 = f1_score(all_labels, pred_labels, average='macro')\n",
    "    g_mean = calculate_g_mean(all_labels, pred_labels)\n",
    "\n",
    "    return auc_score, ap_score, f1, g_mean\n",
    "\n",
    "\n",
    "class BSNE_Transformer(nn.Module):\n",
    "    \"\"\"\n",
    "    Multi-relation transformer encoder over per-node subgraphs.\n",
    "\n",
    "    Each relation gets its own TransformerEncoder whose attention is\n",
    "    masked by that relation's adjacency (plus self-loops); the encoded\n",
    "    center-node features of all relations are concatenated and fed to an\n",
    "    MLP classifier.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, in_feat, out_feat, relation_nums=3, d_model=256,\n",
    "                 nhead=8, num_layers=3, dim_feedforward=256,\n",
    "                 drop_rate=0.5):\n",
    "        \"\"\"\n",
    "        :param in_feat: input node-feature dimension\n",
    "        :param out_feat: number of output classes\n",
    "        :param relation_nums: number of relation graphs (one encoder each)\n",
    "        :param d_model: hidden dimension of the transformer encoders\n",
    "        :param nhead: number of attention heads\n",
    "        :param num_layers: encoder layers per relation\n",
    "        :param dim_feedforward: FFN dimension inside each encoder layer\n",
    "        :param drop_rate: dropout probability\n",
    "        \"\"\"\n",
    "        super().__init__()\n",
    "        self.relation_nums = relation_nums\n",
    "        self.d_model = d_model\n",
    "        self.nhead = nhead\n",
    "\n",
    "        # Project raw features into the transformer dimension.\n",
    "        self.feature_proj = nn.Sequential(\n",
    "            nn.Linear(in_feat, d_model),\n",
    "            nn.LayerNorm(d_model),\n",
    "            nn.ReLU()\n",
    "        )\n",
    "        self.norm = nn.LayerNorm(d_model)\n",
    "\n",
    "        # One independent encoder stack per relation type.\n",
    "        self.relation_encoders = nn.ModuleList([\n",
    "            TransformerEncoder(\n",
    "                TransformerEncoderLayer(\n",
    "                    d_model=d_model,\n",
    "                    nhead=nhead,\n",
    "                    dim_feedforward=dim_feedforward,\n",
    "                    dropout=drop_rate,\n",
    "                    batch_first=True\n",
    "                ),\n",
    "                num_layers=num_layers\n",
    "            ) for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        # Classify the concatenated per-relation center embeddings.\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(relation_nums * d_model, 512),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(drop_rate),\n",
    "            nn.Linear(512, out_feat)\n",
    "        )\n",
    "        # Xavier init for all weight matrices (1-D params keep defaults).\n",
    "        for p in self.parameters():\n",
    "            if p.dim() > 1:\n",
    "                nn.init.xavier_uniform_(p)\n",
    "\n",
    "    #         self.node_feature_extractor = nn.Sequential(\n",
    "    #             nn.Linear(relation_nums * d_model, d_model),\n",
    "    #             nn.ReLU(),\n",
    "    #             nn.LayerNorm(d_model)\n",
    "    #         )\n",
    "\n",
    "    def forward(self, subgraph_batch):\n",
    "        \"\"\"\n",
    "        :param subgraph_batch: list of subgraph dicts ('features', 'edges',\n",
    "            'global_idx') with the center node first, as built by\n",
    "            create_node_subgraph\n",
    "        :return: (center_logits [B, out_feat] log-probabilities,\n",
    "                  center_features [B, relation_nums * d_model])\n",
    "        \"\"\"\n",
    "        # Process every subgraph separately (they differ in node count).\n",
    "        center_logits_list = []\n",
    "        center_features_list = []\n",
    "\n",
    "        for sg in subgraph_batch:\n",
    "            # Project this subgraph's node features.\n",
    "            features = self.feature_proj(sg['features'].unsqueeze(0))  # [1, num_nodes, d_model]\n",
    "            features = self.norm(features)\n",
    "\n",
    "            # Encode the subgraph once per relation type.\n",
    "            rel_outputs = []\n",
    "            num_nodes = features.size(1)\n",
    "\n",
    "            for rel_idx in range(self.relation_nums):\n",
    "                # Dense adjacency matrix for the current relation.\n",
    "                edge_index = sg['edges'][rel_idx]\n",
    "                adj = torch.zeros(num_nodes, num_nodes,\n",
    "                                  dtype=torch.float, device=features.device)\n",
    "\n",
    "                if edge_index.size(1) > 0:\n",
    "                    src, dst = edge_index\n",
    "                    adj[src, dst] = 1.0\n",
    "\n",
    "                # Add self-loops.\n",
    "                adj[range(num_nodes), range(num_nodes)] = 1.0\n",
    "\n",
    "                # Attention mask: 0 where an edge exists, -inf elsewhere.\n",
    "                adj_mask = adj.masked_fill(adj == 0.0, float('-inf'))\n",
    "                adj_mask = adj_mask.masked_fill(adj == 1.0, 0.0)\n",
    "\n",
    "                # Expand to the [batch * nhead, L, L] shape used for a\n",
    "                # per-head attention mask.\n",
    "                adj_mask = adj_mask.unsqueeze(0).unsqueeze(0)  # [1, 1, num_nodes, num_nodes]\n",
    "                adj_mask = adj_mask.expand(1, self.nhead, num_nodes, num_nodes)\n",
    "                adj_mask = adj_mask.reshape(-1, num_nodes, num_nodes)\n",
    "\n",
    "                # Relation-specific encoding.\n",
    "                encoder_output = self.relation_encoders[rel_idx](\n",
    "                    src=features,\n",
    "                    mask=adj_mask\n",
    "                )\n",
    "                rel_outputs.append(encoder_output)\n",
    "\n",
    "            # Concatenate the per-relation encodings.\n",
    "            combined = torch.cat(rel_outputs, dim=-1)  # [1, num_nodes, rel*d_model]\n",
    "\n",
    "            # The center node is always the first node of the subgraph.\n",
    "            center_features = combined[:, 0, :]  # [1, rel*d_model]\n",
    "            center_logits = self.classifier(center_features)  # [1, out_feat]\n",
    "            center_logits = F.log_softmax(center_logits, dim=-1)\n",
    "\n",
    "            center_logits_list.append(center_logits)\n",
    "            center_features_list.append(center_features)\n",
    "\n",
    "        # Stack the per-subgraph results into a batch.\n",
    "        center_logits = torch.cat(center_logits_list, dim=0)\n",
    "        center_features = torch.cat(center_features_list, dim=0)\n",
    "\n",
    "        return center_logits, center_features\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5ba01b16",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "local_loss: 0.24077970578109703,global_loss: 0.38898971676826477\n",
      "local_loss: 0.5309440543002869,global_loss: 1.145205020904541\n",
      "local_loss: 0.23683794257590876,global_loss: 1.0929845571517944\n",
      "local_loss: 0.4337542232678742,global_loss: 0.7041538953781128\n",
      "local_loss: 0.28919804016547707,global_loss: -0.13940614461898804\n",
      "local_loss: 0.3142543976607615,global_loss: 0.7919521331787109\n",
      "local_loss: 0.8370750785540604,global_loss: 0.5171525478363037\n",
      "local_loss: 0.482150498641184,global_loss: 0.9915789365768433\n",
      "feature_proj.0.weight 0.0017287993105128407 0.1507774442434311\n",
      "feature_proj.0.bias 0.019704412668943405 0.10772006958723068\n",
      "feature_proj.1.weight 1.0001249313354492 0.0009839767590165138\n",
      "feature_proj.1.bias 6.250083970371634e-05 0.0009900262812152505\n",
      "norm.weight 1.0000312328338623 0.0010074147721752524\n",
      "norm.bias 9.374960063723847e-05 0.0010034642182290554\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0007152182515710592 0.08815738558769226\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -4.160399839747697e-05 0.0008216837886720896\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0010108896531164646 0.12493324279785156\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias -3.124010254396126e-05 0.001007396960631013\n",
      "relation_encoders.0.layers.0.linear1.weight -0.0001463711232645437 0.07911663502454758\n",
      "relation_encoders.0.layers.0.linear1.bias -0.005934635177254677 0.07296987622976303\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00036576949059963226 0.078732430934906\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009301872923970222 0.03446188569068909\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999998807907104 0.0010078989434987307\n",
      "relation_encoders.0.layers.0.norm1.bias 0.00012500077718868852 0.0009999943431466818\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9999374151229858 0.0010059108026325703\n",
      "relation_encoders.0.layers.0.norm2.bias 6.251056038308889e-05 0.001005917671136558\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015763465489726514 0.08816998451948166\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 1.695972605375573e-05 0.0008220244781114161\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014156806282699108 0.1253124177455902\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias 6.249896250665188e-05 0.0010059307096526027\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007595249335281551 0.07909536361694336\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005856510251760483 0.07299556583166122\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0003651314473245293 0.07920871675014496\n",
      "relation_encoders.0.layers.1.linear2.bias -0.00920812413096428 0.03416810929775238\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999999403953552 0.001007898012176156\n",
      "relation_encoders.0.layers.1.norm1.bias 0.00018749854643829167 0.0009900258155539632\n",
      "relation_encoders.0.layers.1.norm2.weight 0.9999999403953552 0.001007905462756753\n",
      "relation_encoders.0.layers.1.norm2.bias 9.374874935019761e-05 0.0010034629376605153\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -1.9826044081128202e-05 0.08797493577003479\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias -2.0243107428541407e-05 0.0008205364574678242\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0028324597515165806 0.1250809282064438\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias 0.00015624859952367842 0.0009955225978046656\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006409037741832435 0.07865375280380249\n",
      "relation_encoders.0.layers.2.linear1.bias -0.0059346361085772514 0.07293037325143814\n",
      "relation_encoders.0.layers.2.linear2.weight -9.068972576642409e-05 0.07871931791305542\n",
      "relation_encoders.0.layers.2.linear2.bias -0.009020624682307243 0.03456113114953041\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999374151229858 0.0010059238411486149\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00012499863805714995 0.0009999971371144056\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9996561408042908 0.0009464756585657597\n",
      "relation_encoders.0.layers.2.norm2.bias -2.0393370505189523e-05 0.0005529069458134472\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00036161133903078735 0.08819251507520676\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 1.4452816685661674e-05 0.0008237812435254455\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.00048632416292093694 0.12414687871932983\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 6.24996901024133e-05 0.0010059322230517864\n",
      "relation_encoders.1.layers.0.linear1.weight 3.298191586509347e-06 0.07904745638370514\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0026275310665369034 0.07193814963102341\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00032737216679379344 0.07915487885475159\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002235625870525837 0.037945136427879333\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0000624656677246 0.001005899510346353\n",
      "relation_encoders.1.layers.0.norm1.bias -0.000187513796845451 0.0009899769211187959\n",
      "relation_encoders.1.layers.0.norm2.weight 1.0000312328338623 0.0010073883458971977\n",
      "relation_encoders.1.layers.0.norm2.bias 6.249803118407726e-05 0.0010059317573904991\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0003622198710218072 0.08944755792617798\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -2.968374428746756e-05 0.0008205355843529105\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003204642329365015 0.12488726526498795\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -3.1249735911842436e-05 0.0010074109304696321\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011182029265910387 0.0788201242685318\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0026744045317173004 0.07183992862701416\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0006036005215719342 0.07866758108139038\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0023293758276849985 0.038070790469646454\n",
      "relation_encoders.1.layers.1.norm1.weight 1.000093698501587 0.0010034627048298717\n",
      "relation_encoders.1.layers.1.norm1.bias -3.125001967418939e-05 0.0010074095334857702\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0001873970031738 0.0009900276781991124\n",
      "relation_encoders.1.layers.1.norm2.bias -9.374603541800752e-05 0.001003459794446826\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016341579612344503 0.08849358558654785\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias -3.4869517548941076e-05 0.0008189640357159078\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00030681485077366233 0.12454890459775925\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0002812485909089446 0.0009672180749475956\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010007518576458097 0.07944739609956741\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002713460009545088 0.07205899804830551\n",
      "relation_encoders.1.layers.2.linear2.weight -8.061094558797777e-05 0.07945025712251663\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002298128791153431 0.037824466824531555\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0001249313354492 0.0010000061010941863\n",
      "relation_encoders.1.layers.2.norm1.bias -3.125597140751779e-05 0.0010074033634737134\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9998749494552612 0.000999992829747498\n",
      "relation_encoders.1.layers.2.norm2.bias -7.376623398158699e-05 0.0004728689673356712\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004728464409708977 0.08851735293865204\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 3.2901614758884534e-05 0.0008200122392736375\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029821975622326136 0.12570218741893768\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 3.1247298466041684e-05 0.0010074080200865865\n",
      "relation_encoders.2.layers.0.linear1.weight -7.121317321434617e-05 0.07938937842845917\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0005585969192907214 0.06871385127305984\n",
      "relation_encoders.2.layers.0.linear2.weight 0.0002466989099048078 0.07875275611877441\n",
      "relation_encoders.2.layers.0.linear2.bias 0.009027974680066109 0.03322184085845947\n",
      "relation_encoders.2.layers.0.norm1.weight 0.9999374151229858 0.0010059333872050047\n",
      "relation_encoders.2.layers.0.norm1.bias -4.064349923282862e-08 0.0010078592458739877\n",
      "relation_encoders.2.layers.0.norm2.weight 1.000093698501587 0.0010034628212451935\n",
      "relation_encoders.2.layers.0.norm2.bias 6.249949365155771e-05 0.0010059317573904991\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.00040453189285472035 0.08891645818948746\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 6.571468111360446e-05 0.0008178144926205277\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010792661923915148 0.12427137047052383\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -9.374195360578597e-05 0.0010034546721726656\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00020600453717634082 0.07930707186460495\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0005742041394114494 0.06865929067134857\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00016380644228775054 0.07922869175672531\n",
      "relation_encoders.2.layers.1.linear2.bias 0.008996726013720036 0.033161990344524384\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999686479568481 0.001007413724437356\n",
      "relation_encoders.2.layers.1.norm1.bias -9.374965156894177e-05 0.0010034608421847224\n",
      "relation_encoders.2.layers.1.norm2.weight 1.000093698501587 0.001003393204882741\n",
      "relation_encoders.2.layers.1.norm2.bias 0.00012500047159846872 0.0009999964386224747\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.00046037061838433146 0.08849549293518066\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 3.177948747179471e-05 0.0008195458794943988\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 2.5359971914440393e-05 0.12503594160079956\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 3.1249299354385585e-05 0.0010074106976389885\n",
      "relation_encoders.2.layers.2.linear1.weight 8.542800787836313e-05 0.07901866734027863\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005351472645998001 0.0687151774764061\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0005151035147719085 0.07935141026973724\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009090473875403404 0.03313402086496353\n",
      "relation_encoders.2.layers.2.norm1.weight 1.000093698501587 0.0010034552542492747\n",
      "relation_encoders.2.layers.2.norm1.bias -2.9831426218152046e-10 0.0010079012718051672\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9998124837875366 0.0009900296572595835\n",
      "relation_encoders.2.layers.2.norm2.bias 1.7093738279072568e-05 0.0005221667233854532\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 000, BSNE Loss: 1.1072\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.35847220216469966,global_loss: 0.8408559560775757\n",
      "local_loss: 0.5828394414988239,global_loss: 0.5371885299682617\n",
      "local_loss: 0.31661024481740124,global_loss: 0.8489018678665161\n",
      "local_loss: 0.27282050139086444,global_loss: 0.5393134951591492\n",
      "local_loss: 0.2776667345979521,global_loss: 0.548024594783783\n",
      "local_loss: 0.15064471262112902,global_loss: 0.6210936903953552\n",
      "local_loss: 0.37023451344958064,global_loss: 0.7447234988212585\n",
      "local_loss: 0.4416000576624181,global_loss: 1.0424823760986328\n",
      "feature_proj.0.weight 0.0017430728767067194 0.15079434216022491\n",
      "feature_proj.0.bias 0.019752269610762596 0.10779378563165665\n",
      "feature_proj.1.weight 1.0001640319824219 0.0015123961493372917\n",
      "feature_proj.1.bias 0.00011522023123688996 0.0014537879033014178\n",
      "norm.weight 0.9999994039535522 0.0015207893447950482\n",
      "norm.bias 0.0001338340516667813 0.0014729276299476624\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0007034430163912475 0.08816464990377426\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -7.275732059497386e-05 0.0012003136798739433\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0010062975343316793 0.12494668364524841\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 1.3917233445681632e-05 0.001506775850430131\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00014650198863819242 0.07911824434995651\n",
      "relation_encoders.0.layers.0.linear1.bias -0.005893273279070854 0.07294066995382309\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00036901829298585653 0.07874315232038498\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009317588992416859 0.03457029163837433\n",
      "relation_encoders.0.layers.0.norm1.weight 1.0000157356262207 0.001497546210885048\n",
      "relation_encoders.0.layers.0.norm1.bias 0.00011853183968923986 0.00145811983384192\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9998446106910706 0.0015232773730531335\n",
      "relation_encoders.0.layers.0.norm2.bias -3.7667559809051454e-06 0.0014221288729459047\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00016144392429850996 0.08817098289728165\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 3.629001002991572e-05 0.0012190710986033082\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.001414780505001545 0.12532712519168854\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias 4.890118361799978e-05 0.0014468481531366706\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007597595104016364 0.0791090577840805\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005821924656629562 0.07306565344333649\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0003688693977892399 0.07922343909740448\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009227469563484192 0.03411289304494858\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999995231628418 0.001525222440250218\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0003323382406961173 0.0013793647522106767\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000730752944946 0.0014627280179411173\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00016501059872098267 0.0015130453975871205\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -1.8967006326420233e-05 0.0879901796579361\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias -2.0269048036425374e-05 0.0012199506163597107\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0028238510712981224 0.12508240342140198\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias 8.342138607986271e-05 0.0015123756602406502\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006375400116667151 0.0786624476313591\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005954251624643803 0.0729975774884224\n",
      "relation_encoders.0.layers.2.linear2.weight -8.936810627346858e-05 0.07872974872589111\n",
      "relation_encoders.0.layers.2.linear2.bias -0.009003178216516972 0.034614600241184235\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999284744262695 0.0015332016628235579\n",
      "relation_encoders.0.layers.2.norm1.bias 0.0001575743081048131 0.0014780662022531033\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9994232654571533 0.0014935695799067616\n",
      "relation_encoders.0.layers.2.norm2.bias 1.349104059045203e-05 0.0009145871736109257\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00035808514803647995 0.0881994292140007\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 3.4798140404745936e-05 0.0011827565031126142\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004777928988914937 0.12414675951004028\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.00012943241745233536 0.0015311288880184293\n",
      "relation_encoders.1.layers.0.linear1.weight 9.692827006801963e-06 0.07905826717615128\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002514579799026251 0.07188926637172699\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00032629515044391155 0.07916591316461563\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002195734065026045 0.03804061934351921\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0000419616699219 0.0014841726515442133\n",
      "relation_encoders.1.layers.0.norm1.bias -0.00036601413739845157 0.0014804827515035868\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9999778270721436 0.0015658537158742547\n",
      "relation_encoders.1.layers.0.norm2.bias 3.0881656130077317e-06 0.0015130348037928343\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.000361665734089911 0.08944211900234222\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -1.7266882423427887e-05 0.0012513926485553384\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032139045652002096 0.12488240748643875\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -6.973544805077836e-05 0.0015350481262430549\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011206356575712562 0.07882945984601974\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0026252660900354385 0.07179892808198929\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0005803594249300659 0.07868211716413498\n",
      "relation_encoders.1.layers.1.linear2.bias -0.002366090891882777 0.03810647130012512\n",
      "relation_encoders.1.layers.1.norm1.weight 1.0000959634780884 0.0016112517332658172\n",
      "relation_encoders.1.layers.1.norm1.bias -5.38924359716475e-05 0.0015473994426429272\n",
      "relation_encoders.1.layers.1.norm2.weight 1.000278115272522 0.0015730141894891858\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00012517216964624822 0.0014808473642915487\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016241014236584306 0.08850540965795517\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias -1.6030648112064227e-05 0.0012003268348053098\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00030816177604719996 0.12456419318914413\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0004077685880474746 0.0014681185130029917\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010010622208938003 0.07945216447114944\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002678604330867529 0.07216591387987137\n",
      "relation_encoders.1.layers.2.linear2.weight -9.133821004070342e-05 0.07945790886878967\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0023249266669154167 0.03791254386305809\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0001682043075562 0.001525768544524908\n",
      "relation_encoders.1.layers.2.norm1.bias -9.364834113512188e-05 0.0014883268158882856\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9996323585510254 0.001639427151530981\n",
      "relation_encoders.1.layers.2.norm2.bias -9.550123650114983e-05 0.000794681254774332\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.00047130449092946947 0.08852171897888184\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 3.29004687955603e-05 0.0012382251443341374\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002989646978676319 0.12570986151695251\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 4.1337087168358266e-05 0.0016374288825318217\n",
      "relation_encoders.2.layers.0.linear1.weight -6.550783291459084e-05 0.07939096540212631\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0005917362868785858 0.06880243122577667\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00024519427097402513 0.07875650376081467\n",
      "relation_encoders.2.layers.0.linear2.bias 0.00896880030632019 0.033341314643621445\n",
      "relation_encoders.2.layers.0.norm1.weight 0.9999176859855652 0.0016712067881599069\n",
      "relation_encoders.2.layers.0.norm1.bias 2.3440938093699515e-05 0.001569275977090001\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000989437103271 0.0015639049233868718\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00011463135160738602 0.0015827941242605448\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.00040536755113862455 0.08892799913883209\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 6.725054117850959e-05 0.0012474297545850277\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010780943557620049 0.12427431344985962\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0001488210546085611 0.0014962626155465841\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00020240762387402356 0.07932490110397339\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0005376150365918875 0.06866636127233505\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00015886526671238244 0.07924623787403107\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009051574394106865 0.03316781297326088\n",
      "relation_encoders.2.layers.1.norm1.weight 1.0000228881835938 0.0014603406889364123\n",
      "relation_encoders.2.layers.1.norm1.bias -7.247300527524203e-05 0.001494616735726595\n",
      "relation_encoders.2.layers.1.norm2.weight 1.000100016593933 0.0015205489471554756\n",
      "relation_encoders.2.layers.1.norm2.bias 0.00014785255189053714 0.00141257478389889\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004605994326993823 0.08848470449447632\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 4.6798882976872846e-05 0.0012676605256274343\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 2.56694620475173e-05 0.12504437565803528\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.00014490405737888068 0.0015256996266543865\n",
      "relation_encoders.2.layers.2.linear1.weight 8.593458915129304e-05 0.07902977615594864\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005441703833639622 0.06871999800205231\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00048620402230881155 0.07935822010040283\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009152434766292572 0.033117104321718216\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0001730918884277 0.001485195942223072\n",
      "relation_encoders.2.layers.2.norm1.bias 1.4554665540345013e-05 0.0014238342409953475\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9996034502983093 0.0015712741296738386\n",
      "relation_encoders.2.layers.2.norm2.bias 8.646539936307818e-05 0.0008231610408984125\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 001, BSNE Loss: 1.0617\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.331299666806569,global_loss: 0.22230367362499237\n",
      "local_loss: 0.36379557759751235,global_loss: 0.4932469427585602\n",
      "local_loss: 0.3278834264509106,global_loss: 1.1111106872558594\n",
      "local_loss: 0.43312059838619804,global_loss: 1.2376182079315186\n",
      "local_loss: 0.32136221530075815,global_loss: 0.5749085545539856\n",
      "local_loss: 0.4961518455078405,global_loss: 0.8756234645843506\n",
      "local_loss: 0.5163381364553391,global_loss: 0.9514732360839844\n",
      "local_loss: 0.558980079940234,global_loss: 0.7361615896224976\n",
      "feature_proj.0.weight 0.001741571701131761 0.15079839527606964\n",
      "feature_proj.0.bias 0.01974574476480484 0.10788658261299133\n",
      "feature_proj.1.weight 1.00018310546875 0.0018868105253204703\n",
      "feature_proj.1.bias 0.00010120431397808716 0.0017796733882278204\n",
      "norm.weight 1.0000052452087402 0.0019250819459557533\n",
      "norm.bias 0.0001226525055244565 0.0018460046267136931\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006934572011232376 0.08817321062088013\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -9.263215906685218e-05 0.001577907707542181\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009987547527998686 0.12495027482509613\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 6.778307579224929e-05 0.0019558719359338284\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00014677498256787658 0.07912223786115646\n",
      "relation_encoders.0.layers.0.linear1.bias -0.005895201116800308 0.07293345034122467\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003718397347256541 0.07875190675258636\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009312815964221954 0.034599270671606064\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999800324440002 0.001982105430215597\n",
      "relation_encoders.0.layers.0.norm1.bias 9.349259198643267e-05 0.0018350735772401094\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997588992118835 0.0019833040423691273\n",
      "relation_encoders.0.layers.0.norm2.bias -6.277541979216039e-05 0.001794634386897087\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00016094616148620844 0.08817070722579956\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 4.2976520489901304e-05 0.001517089200206101\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014143940061330795 0.1253393143415451\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -8.105780580081046e-06 0.00185204460285604\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007617728551849723 0.07911698520183563\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005835637915879488 0.07308131456375122\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0003768852329812944 0.07923483848571777\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009276365861296654 0.03408394753932953\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999654293060303 0.001966264797374606\n",
      "relation_encoders.0.layers.1.norm1.bias 0.00039221905171871185 0.0017764235381036997\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000762939453125 0.0018655387684702873\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00018002084107138216 0.0018806790467351675\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -1.967378193512559e-05 0.08799702674150467\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 2.3682940053504353e-08 0.0014562756987288594\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002810947597026825 0.12508268654346466\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias 2.708396641537547e-05 0.0019228311721235514\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006371692288666964 0.07867473363876343\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005927964113652706 0.07307625561952591\n",
      "relation_encoders.0.layers.2.linear2.weight -8.512636850355193e-05 0.07874231785535812\n",
      "relation_encoders.0.layers.2.linear2.bias -0.009001472033560276 0.03466621786355972\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999423623085022 0.0018283654935657978\n",
      "relation_encoders.0.layers.2.norm1.bias 0.0002098708355333656 0.0018661139765754342\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9991254806518555 0.0018451757496222854\n",
      "relation_encoders.0.layers.2.norm2.bias 6.272570317378268e-05 0.001193299307487905\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.0003505135828163475 0.08820175379514694\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 5.990680801915005e-05 0.0015317945508286357\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.00047437759349122643 0.12414762377738953\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0001657414250075817 0.0019462840864434838\n",
      "relation_encoders.1.layers.0.linear1.weight 1.3303302694112062e-05 0.07907107472419739\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0024694781750440598 0.07188111543655396\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003308844461571425 0.07917816936969757\n",
      "relation_encoders.1.layers.0.linear2.bias -0.00216892478056252 0.038046177476644516\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0000720024108887 0.0019123994279652834\n",
      "relation_encoders.1.layers.0.norm1.bias -0.00044693078962154686 0.0019318389240652323\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9999046325683594 0.00198793550953269\n",
      "relation_encoders.1.layers.0.norm2.bias 5.251367838354781e-06 0.001978282816708088\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0003596106544137001 0.08944085985422134\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -3.527548324200325e-05 0.001665986143052578\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003219296457245946 0.12486593425273895\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -9.79224787442945e-05 0.002011388074606657\n",
      "relation_encoders.1.layers.1.linear1.weight -0.001121419481933117 0.07884190231561661\n",
      "relation_encoders.1.layers.1.linear1.bias 0.002602727385237813 0.07175524532794952\n",
      "relation_encoders.1.layers.1.linear2.weight 0.000556326936930418 0.07869508862495422\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0024234000593423843 0.0380740761756897\n",
      "relation_encoders.1.layers.1.norm1.weight 1.0000500679016113 0.002025899011641741\n",
      "relation_encoders.1.layers.1.norm1.bias -8.244077616836876e-05 0.0020493760239332914\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003230571746826 0.0019235190702602267\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00021368474699556828 0.0018919861176982522\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016291851352434605 0.08851855248212814\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias -8.667690053698607e-06 0.0014908832963556051\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002992835361510515 0.12457531690597534\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0004998536896891892 0.0018984285416081548\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010026778327301145 0.07945812493562698\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0026802816428244114 0.07223150879144669\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00010086392285302281 0.07946615666151047\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0023655560798943043 0.03792545571923256\n",
      "relation_encoders.1.layers.2.norm1.weight 1.000218152999878 0.0018844256410375237\n",
      "relation_encoders.1.layers.2.norm1.bias -0.00016086897812783718 0.0019011113326996565\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9993998408317566 0.0020450546871870756\n",
      "relation_encoders.1.layers.2.norm2.bias -0.00011064387945225462 0.0010500254575163126\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004711490764748305 0.08852791041135788\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 2.7089416107628495e-05 0.001592869171872735\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029850644059479237 0.12571485340595245\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 7.149198791012168e-05 0.002096828306093812\n",
      "relation_encoders.2.layers.0.linear1.weight -5.8205798268318176e-05 0.07939524948596954\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0006532019469887018 0.06884155422449112\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00023985974257811904 0.0787607878446579\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008927472867071629 0.03345504030585289\n",
      "relation_encoders.2.layers.0.norm1.weight 0.9999402761459351 0.002210583072155714\n",
      "relation_encoders.2.layers.0.norm1.bias -2.3383770894724876e-05 0.0020200908184051514\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000989437103271 0.0019995439797639847\n",
      "relation_encoders.2.layers.0.norm2.bias 8.646598143968731e-05 0.0020938003435730934\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0004010354168713093 0.08893688023090363\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 7.139021181501448e-05 0.001570747816003859\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010796907590702176 0.12428418546915054\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00020662116003222764 0.0018625017255544662\n",
      "relation_encoders.2.layers.1.linear1.weight 0.0001997531799133867 0.07933981716632843\n",
      "relation_encoders.2.layers.1.linear1.bias -0.000519926194101572 0.06865809112787247\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00014543518773280084 0.07926249504089355\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009131265804171562 0.03322826325893402\n",
      "relation_encoders.2.layers.1.norm1.weight 1.000028371810913 0.001776544377207756\n",
      "relation_encoders.2.layers.1.norm1.bias -7.707855547778308e-05 0.0019181171664968133\n",
      "relation_encoders.2.layers.1.norm2.weight 1.000103235244751 0.0019140475196763873\n",
      "relation_encoders.2.layers.1.norm2.bias 0.00013200809189584106 0.0017764855874702334\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.000456936948467046 0.088482566177845\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 2.5346414986415766e-05 0.0016145864501595497\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 2.4229753762483597e-05 0.12504787743091583\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0002058267273241654 0.0019498646724969149\n",
      "relation_encoders.2.layers.2.linear1.weight 8.56050755828619e-05 0.07903914898633957\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005343265365809202 0.0687142163515091\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0004716736439149827 0.07936929166316986\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009176607243716717 0.0331076942384243\n",
      "relation_encoders.2.layers.2.norm1.weight 1.000234603881836 0.0017707577208057046\n",
      "relation_encoders.2.layers.2.norm1.bias -1.4143042790237814e-05 0.0018361755646765232\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9993463754653931 0.0020239418372511864\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0001840314653236419 0.0010532572632655501\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 002, BSNE Loss: 1.1939\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.22573480840503052,global_loss: 1.3960353136062622\n",
      "local_loss: 0.2487492501830384,global_loss: 0.42683398723602295\n",
      "local_loss: 0.41667603392077857,global_loss: 0.9793010950088501\n",
      "local_loss: 0.4637613585251056,global_loss: -0.09064552932977676\n",
      "local_loss: 0.21235888023341956,global_loss: 1.2863693237304688\n",
      "local_loss: 0.5427531143148364,global_loss: -0.2682988941669464\n",
      "local_loss: 0.3614275412545898,global_loss: 0.6440362334251404\n",
      "local_loss: 0.24770895933382947,global_loss: 0.32505783438682556\n",
      "feature_proj.0.weight 0.0016811725217849016 0.1508258730173111\n",
      "feature_proj.0.bias 0.019669968634843826 0.10792619735002518\n",
      "feature_proj.1.weight 1.00021231174469 0.002218774985522032\n",
      "feature_proj.1.bias 0.00013117707567289472 0.0020900871604681015\n",
      "norm.weight 1.0000172853469849 0.0023307933006435633\n",
      "norm.bias 9.721405513118953e-05 0.0022010428365319967\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006832617218606174 0.08818525820970535\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -0.00010538237256696448 0.001924528507515788\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009888970525935292 0.12495061755180359\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00011354531307006255 0.002301863394677639\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00014750804984942079 0.07912607491016388\n",
      "relation_encoders.0.layers.0.linear1.bias -0.005898623261600733 0.07289540022611618\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00037418195279315114 0.07876043021678925\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009307748638093472 0.03461163491010666\n",
      "relation_encoders.0.layers.0.norm1.weight 0.99996018409729 0.0023979630786925554\n",
      "relation_encoders.0.layers.0.norm1.bias 3.6885132431052625e-05 0.0020919223316013813\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997341632843018 0.002287724055349827\n",
      "relation_encoders.0.layers.0.norm2.bias -3.716578066814691e-05 0.0020172293297946453\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.0001608256425242871 0.08817382156848907\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 4.087868728674948e-05 0.0017204590840265155\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014165694592520595 0.12534856796264648\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -3.110332909272984e-05 0.0021259975619614124\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007632080232724547 0.07912567257881165\n",
      "relation_encoders.0.layers.1.linear1.bias -0.00585197564214468 0.0730685219168663\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00037873833207413554 0.0792451873421669\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009294411167502403 0.034048665314912796\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999780058860779 0.002230120589956641\n",
      "relation_encoders.0.layers.1.norm1.bias 0.00040779588744044304 0.0020639754366129637\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000649690628052 0.0021661752834916115\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00019373506074771285 0.0021996984723955393\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.1406091036624275e-05 0.08800604939460754\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 5.126957148604561e-06 0.0017120976699516177\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002797478809952736 0.1250842958688736\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -3.177658072672784e-05 0.002253081416711211\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006365537410601974 0.0786871388554573\n",
      "relation_encoders.0.layers.2.linear1.bias -0.00589396245777607 0.07315827161073685\n",
      "relation_encoders.0.layers.2.linear2.weight -9.856314864009619e-05 0.07875627279281616\n",
      "relation_encoders.0.layers.2.linear2.bias -0.009024416096508503 0.03474806994199753\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999343156814575 0.0020859730429947376\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00025813697720877826 0.0022383793257176876\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9988323450088501 0.0021011901553720236\n",
      "relation_encoders.0.layers.2.norm2.bias 7.057375478325412e-05 0.001448668772354722\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00033969999640248716 0.0882052630186081\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 9.738316293805838e-05 0.001877435832284391\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004683301376644522 0.12415219843387604\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.00020848089479841292 0.0023226249031722546\n",
      "relation_encoders.1.layers.0.linear1.weight 1.7480459064245224e-05 0.07908269017934799\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0024210470728576183 0.07188422977924347\n",
      "relation_encoders.1.layers.0.linear2.weight -0.000344806641805917 0.07918839901685715\n",
      "relation_encoders.1.layers.0.linear2.bias -0.0022077099420130253 0.03798626735806465\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0000927448272705 0.002300684107467532\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0005013265181332827 0.0023292205296456814\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9998543858528137 0.002357376739382744\n",
      "relation_encoders.1.layers.0.norm2.bias 1.9568597053876147e-05 0.0024010855704545975\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0003573056892491877 0.08944473415613174\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -7.54026768845506e-05 0.0020478302612900734\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032235910184681416 0.12485989183187485\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0001346424251096323 0.002483369316905737\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011209403164684772 0.07884939759969711\n",
      "relation_encoders.1.layers.1.linear1.bias 0.002574711572378874 0.07172580063343048\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0005403372924774885 0.07870323956012726\n",
      "relation_encoders.1.layers.1.linear2.bias -0.002458691131323576 0.03794410824775696\n",
      "relation_encoders.1.layers.1.norm1.weight 1.0000420808792114 0.0023921846877783537\n",
      "relation_encoders.1.layers.1.norm1.bias -0.00013668477186001837 0.002441008575260639\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003440380096436 0.0022472464479506016\n",
      "relation_encoders.1.layers.1.norm2.bias -0.0002680472389329225 0.002247716300189495\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016412354307249188 0.08852037042379379\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 1.22489218483679e-05 0.0018008277984336019\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002924692817032337 0.12458483874797821\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0005030430038459599 0.0023112522903829813\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010033503640443087 0.07946780323982239\n",
      "relation_encoders.1.layers.2.linear1.bias 0.00267311604693532 0.07225479930639267\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00011570006608963013 0.07947432994842529\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0024037479888647795 0.03797895461320877\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0002820491790771 0.0022440124303102493\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0001763871405273676 0.002313066041097045\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9991641044616699 0.002375864190980792\n",
      "relation_encoders.1.layers.2.norm2.bias -9.97539609670639e-05 0.0012614631559699774\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004713251255452633 0.08853153884410858\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 1.9898941900464706e-05 0.0018587795784696937\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029843864031136036 0.1257222443819046\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 7.46905425330624e-05 0.0024989256635308266\n",
      "relation_encoders.2.layers.0.linear1.weight -5.1952607464045286e-05 0.07939866185188293\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0007246220484375954 0.06886886060237885\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00023337002494372427 0.07876258343458176\n",
      "relation_encoders.2.layers.0.linear2.bias 0.00887101236730814 0.03362349793314934\n",
      "relation_encoders.2.layers.0.norm1.weight 0.9999639987945557 0.002659395569935441\n",
      "relation_encoders.2.layers.0.norm1.bias -4.971408634446561e-05 0.0024092013482004404\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000956058502197 0.002401096746325493\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00010038807522505522 0.002606641035526991\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003990989353042096 0.08894649893045425\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 9.811263589654118e-05 0.0018494544783607125\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.001078311586752534 0.1242956891655922\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.000278606079518795 0.0021467721089720726\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019773744861595333 0.07935182005167007\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0005023508565500379 0.06868666410446167\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00013905635569244623 0.07927563041448593\n",
      "relation_encoders.2.layers.1.linear2.bias 0.00917496345937252 0.03327089548110962\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999979734420776 0.00211657234467566\n",
      "relation_encoders.2.layers.1.norm1.bias -9.046951890923083e-05 0.002303327200934291\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0000985860824585 0.002269493183121085\n",
      "relation_encoders.2.layers.1.norm2.bias 0.00011443539551692083 0.002177565824240446\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004573901533149183 0.08848694711923599\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 9.546273759042379e-06 0.00188748212531209\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 2.72790202870965e-05 0.12505418062210083\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0002451218606438488 0.0023117316886782646\n",
      "relation_encoders.2.layers.2.linear1.weight 8.680147584527731e-05 0.07904429733753204\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005394008476287127 0.06871312111616135\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00046875342377461493 0.07937928289175034\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009177928790450096 0.03310573101043701\n",
      "relation_encoders.2.layers.2.norm1.weight 1.000239372253418 0.002047481946647167\n",
      "relation_encoders.2.layers.2.norm1.bias -3.1550327548757195e-05 0.0022447644732892513\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9991275072097778 0.0023453100584447384\n",
      "relation_encoders.2.layers.2.norm2.bias 0.00024436519015580416 0.0012775700306519866\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 003, BSNE Loss: 0.9272\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.11228344295518922,global_loss: 0.573449432849884\n",
      "local_loss: 0.41713923685461596,global_loss: 0.6873886585235596\n",
      "local_loss: 0.3906627700088622,global_loss: 1.2314938306808472\n",
      "local_loss: 0.3536980455834383,global_loss: 0.15463905036449432\n",
      "local_loss: 0.2373503241580266,global_loss: 0.6332371234893799\n",
      "local_loss: 0.3202291602860091,global_loss: 1.0794041156768799\n",
      "local_loss: 0.47558235634839735,global_loss: 0.9955312013626099\n",
      "local_loss: 0.36197982518048677,global_loss: -0.09485398232936859\n",
      "feature_proj.0.weight 0.001581204473040998 0.15086480975151062\n",
      "feature_proj.0.bias 0.01958947442471981 0.1080106571316719\n",
      "feature_proj.1.weight 1.000259518623352 0.0025069531984627247\n",
      "feature_proj.1.bias 0.00019551216973923147 0.0023523569107055664\n",
      "norm.weight 1.0000475645065308 0.0026305203791707754\n",
      "norm.bias 0.00011505567817948759 0.002495761727914214\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.000678950164001435 0.08819862455129623\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -8.971603529062122e-05 0.0022385763004422188\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009810132905840874 0.12494959682226181\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00014656406710855663 0.0025576893240213394\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00014778652985114604 0.07913041859865189\n",
      "relation_encoders.0.layers.0.linear1.bias -0.005912869703024626 0.07288557291030884\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003793346113525331 0.07876689732074738\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009309912100434303 0.03465405851602554\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999814033508301 0.0027431021444499493\n",
      "relation_encoders.0.layers.0.norm1.bias -1.2029231584165245e-05 0.0023078497033566236\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997197389602661 0.0024943340104073286\n",
      "relation_encoders.0.layers.0.norm2.bias 2.5146262487396598e-05 0.002283818321302533\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.0001616311928955838 0.08818036317825317\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 4.296398765291087e-05 0.0019335575634613633\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014155604876577854 0.12536099553108215\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -8.711917325854301e-05 0.002467632293701172\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007647741585969925 0.0791325569152832\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005892310291528702 0.07307334989309311\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00039570569060742855 0.07925117760896683\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009332913905382156 0.034035783261060715\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999561905860901 0.0024608985986560583\n",
      "relation_encoders.0.layers.1.norm1.bias 0.00043116509914398193 0.0023719246964901686\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000486373901367 0.002444761572405696\n",
      "relation_encoders.0.layers.1.norm2.bias 0.0001372433762298897 0.0024827849119901657\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.0219955331413075e-05 0.08801279962062836\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.410580448980909e-06 0.0019211189355701208\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002782882656902075 0.12508636713027954\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -7.863828795962036e-05 0.002561846049502492\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006352722994051874 0.07869800925254822\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005883363075554371 0.07320281863212585\n",
      "relation_encoders.0.layers.2.linear2.weight -9.896923438645899e-05 0.07876656204462051\n",
      "relation_encoders.0.layers.2.linear2.bias -0.009015383198857307 0.03484347090125084\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999263882637024 0.0023431843146681786\n",
      "relation_encoders.0.layers.2.norm1.bias 0.0003009941428899765 0.0025195840280503035\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9985367059707642 0.0023129889741539955\n",
      "relation_encoders.0.layers.2.norm2.bias 9.432172373635694e-05 0.0016728504560887814\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.0003301030374132097 0.08821918070316315\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.00014434753393288702 0.002143595600500703\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.00046458718134090304 0.12416069209575653\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.00023999364930205047 0.0026411451399326324\n",
      "relation_encoders.1.layers.0.linear1.weight 2.1952029783278704e-05 0.07909201830625534\n",
      "relation_encoders.1.layers.0.linear1.bias 0.00236987741664052 0.07187288254499435\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003544278151821345 0.07919710129499435\n",
      "relation_encoders.1.layers.0.linear2.bias -0.0022247727029025555 0.037936773151159286\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0001323223114014 0.0026200234424322844\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0005572494119405746 0.0027293518651276827\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9998252391815186 0.0026684729382395744\n",
      "relation_encoders.1.layers.0.norm2.bias 5.566302570514381e-05 0.002799775218591094\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00035509775625541806 0.08945420384407043\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.0001348866062471643 0.0023850835859775543\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032296422868967056 0.12486264854669571\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.00021246555843390524 0.002910643583163619\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011214499827474356 0.0788574367761612\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0025690123438835144 0.07170664519071579\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0005257216980680823 0.07871053367853165\n",
      "relation_encoders.1.layers.1.linear2.bias -0.002472027437761426 0.037872109562158585\n",
      "relation_encoders.1.layers.1.norm1.weight 1.000031590461731 0.0026914470363408327\n",
      "relation_encoders.1.layers.1.norm1.bias -0.00019429964595474303 0.002757253358140588\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003588199615479 0.002547270618379116\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00035456038312986493 0.002556815044954419\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016454060096293688 0.08852794766426086\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 2.7093115932075307e-05 0.0020913383923470974\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002855619532056153 0.12459838390350342\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.000520293484441936 0.0026665206532925367\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010027778334915638 0.0794767290353775\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002672953996807337 0.07228268682956696\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00012636196333914995 0.07948127388954163\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002437806222587824 0.03803931549191475\n",
      "relation_encoders.1.layers.2.norm1.weight 1.00031316280365 0.0025602607056498528\n",
      "relation_encoders.1.layers.2.norm1.bias -0.00016869048704393208 0.002655309857800603\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9988818168640137 0.0026083537377417088\n",
      "relation_encoders.1.layers.2.norm2.bias -7.64964206609875e-05 0.0014524260768666863\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.000481385737657547 0.08853735774755478\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -1.690601493464783e-05 0.002084770007058978\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002978975186124444 0.1257273256778717\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 8.750506094656885e-05 0.0028150344733148813\n",
      "relation_encoders.2.layers.0.linear1.weight -4.624416760634631e-05 0.07940323650836945\n",
      "relation_encoders.2.layers.0.linear1.bias -0.000779088819399476 0.06890585273504257\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00021990688401274383 0.07876501232385635\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008805960416793823 0.03378398343920708\n",
      "relation_encoders.2.layers.0.norm1.weight 0.999986469745636 0.0030453780200332403\n",
      "relation_encoders.2.layers.0.norm1.bias -9.245828550774604e-05 0.002717843744903803\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000896453857422 0.002744630677625537\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00012335430074017495 0.0029852527659386396\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003983031492680311 0.08894875645637512\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.0001405358489137143 0.0020657237619161606\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010782100725919008 0.1242993101477623\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00035075072082690895 0.0023898258805274963\n",
      "relation_encoders.2.layers.1.linear1.weight 0.0001966324052773416 0.07936185598373413\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0004950071452185512 0.06867970526218414\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00015003798762336373 0.07928764820098877\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009158164262771606 0.03328511863946915\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999880194664001 0.0023931029718369246\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00014378695050254464 0.002627692185342312\n",
      "relation_encoders.2.layers.1.norm2.weight 1.00011146068573 0.002584071597084403\n",
      "relation_encoders.2.layers.1.norm2.bias 5.683572817360982e-05 0.002505028387531638\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.00045916656381450593 0.08848989754915237\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias -4.731967237603385e-06 0.0021103653125464916\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 3.176589962095022e-05 0.12505964934825897\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.00029747793450951576 0.0026073968037962914\n",
      "relation_encoders.2.layers.2.linear1.weight 8.840742520987988e-05 0.07905162125825882\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005291497800499201 0.06872417032718658\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00047146109864115715 0.07939326763153076\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009143689647316933 0.0331050381064415\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002572536468506 0.0023067432921379805\n",
      "relation_encoders.2.layers.2.norm1.bias -7.001374615356326e-05 0.00258272816427052\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9989612102508545 0.0026192921213805676\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0002729020779952407 0.001462477375753224\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 004, BSNE Loss: 0.9912\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.3933808141970236,global_loss: 1.2146042585372925\n",
      "local_loss: 0.3143788155497787,global_loss: 0.24782192707061768\n",
      "local_loss: 0.31010235871602443,global_loss: 0.5244397521018982\n",
      "local_loss: 0.28307826862928015,global_loss: -0.05113640055060387\n",
      "local_loss: 0.3877842523965544,global_loss: 0.9086828231811523\n",
      "local_loss: 0.47605363751120083,global_loss: 0.5130712985992432\n",
      "local_loss: 0.395236642820297,global_loss: 0.4914349913597107\n",
      "local_loss: 0.5035689219568543,global_loss: 0.45919206738471985\n",
      "feature_proj.0.weight 0.0014616682892665267 0.15091067552566528\n",
      "feature_proj.0.bias 0.019482694566249847 0.10809484869241714\n",
      "feature_proj.1.weight 1.0003056526184082 0.0028163751121610403\n",
      "feature_proj.1.bias 0.00024582166224718094 0.00263793021440506\n",
      "norm.weight 1.0000865459442139 0.002893079537898302\n",
      "norm.bias 0.00012887781485915184 0.00274599133990705\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006762555567547679 0.08820924162864685\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -8.139210694935173e-05 0.0024845816660672426\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009722138638608158 0.12494699656963348\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00018972864199895412 0.0028327489271759987\n",
      "relation_encoders.0.layers.0.linear1.weight -0.0001453583245165646 0.07913590222597122\n",
      "relation_encoders.0.layers.0.linear1.bias -0.00594558147713542 0.07286205887794495\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00038248993223533034 0.07877345383167267\n",
      "relation_encoders.0.layers.0.linear2.bias -0.00930597260594368 0.03472612425684929\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999792575836182 0.0030514949467033148\n",
      "relation_encoders.0.layers.0.norm1.bias -4.591509059537202e-05 0.002531229518353939\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997202157974243 0.002688130596652627\n",
      "relation_encoders.0.layers.0.norm2.bias 6.859534187242389e-05 0.002584446920081973\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00016222053091041744 0.08818996697664261\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 5.550097193918191e-05 0.0021366653963923454\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014160629361867905 0.12537358701229095\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0001360914611723274 0.0027778788935393095\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007664575823582709 0.07913925498723984\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005917632021009922 0.07307607680559158\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00039966049371287227 0.07925606518983841\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009341971017420292 0.03404249995946884\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999211430549622 0.0027436765376478434\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0005131745128892362 0.002703380072489381\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000474452972412 0.0027200630865991116\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00013498071348294616 0.002808972727507353\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.0806706743314862e-05 0.08802114427089691\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 2.5580826331861317e-05 0.002110580913722515\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002768981736153364 0.12508444488048553\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.00015974236885085702 0.0028362353332340717\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006360430852510035 0.07871036231517792\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005876730661839247 0.07323120534420013\n",
      "relation_encoders.0.layers.2.linear2.weight -8.91999079613015e-05 0.0787782371044159\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008985261432826519 0.034894414246082306\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9999039769172668 0.0025754279922693968\n",
      "relation_encoders.0.layers.2.norm1.bias 0.000322782521834597 0.002767727244645357\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9982631206512451 0.0024939875584095716\n",
      "relation_encoders.0.layers.2.norm2.bias 0.000117554867756553 0.0018899829592555761\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00032167474273592234 0.08824001997709274\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0001992870820686221 0.002413268433883786\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004610840405803174 0.1241731122136116\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0002740546769928187 0.0029166846070438623\n",
      "relation_encoders.1.layers.0.linear1.weight 2.507452154532075e-05 0.07910008728504181\n",
      "relation_encoders.1.layers.0.linear1.bias 0.00233308388851583 0.07185745239257812\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00036120039294473827 0.07920752465724945\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002240891568362713 0.037909336388111115\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0001472234725952 0.0029010302387177944\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0006254073232412338 0.0030810197349637747\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997972249984741 0.0029487661086022854\n",
      "relation_encoders.1.layers.0.norm2.bias 0.00011384862591512501 0.0031428318470716476\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00035317562287673354 0.08946876227855682\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.00019011538824997842 0.002675251569598913\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003234795993193984 0.12486869841814041\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.00029100332176312804 0.003280020086094737\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011211285600438714 0.0788646936416626\n",
      "relation_encoders.1.layers.1.linear1.bias 0.002549642464146018 0.07171238213777542\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0005084234289824963 0.07871691882610321\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0024962956085801125 0.037830106914043427\n",
      "relation_encoders.1.layers.1.norm1.weight 1.0000157356262207 0.003015868365764618\n",
      "relation_encoders.1.layers.1.norm1.bias -0.00022387458011507988 0.00305822747759521\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003507137298584 0.0028513954021036625\n",
      "relation_encoders.1.layers.1.norm2.bias -0.000406973238568753 0.00285906158387661\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016527905245311558 0.0885329470038414\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 2.67375071416609e-05 0.0023580894339829683\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002792046871036291 0.12461017817258835\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0004814675194211304 0.002982279984280467\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010030325502157211 0.07948470860719681\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0026592379435896873 0.07229962944984436\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00013692484935745597 0.07948752492666245\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002469452330842614 0.03810789808630943\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003283023834229 0.0028590350411832333\n",
      "relation_encoders.1.layers.2.norm1.bias -0.00018982110486831516 0.0029606157913804054\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9986200928688049 0.0027947938069701195\n",
      "relation_encoders.1.layers.2.norm2.bias -3.9755610487191007e-05 0.0016364753246307373\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.000489185331389308 0.08853930234909058\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -4.39950090367347e-05 0.0022949122358113527\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002973777474835515 0.12572968006134033\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00011322984209982678 0.003173802513629198\n",
      "relation_encoders.2.layers.0.linear1.weight -4.034042649436742e-05 0.07941022515296936\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0008353338344022632 0.06891857832670212\n",
      "relation_encoders.2.layers.0.linear2.weight 0.0002129215863533318 0.0787709429860115\n",
      "relation_encoders.2.layers.0.linear2.bias 0.0087664145976305 0.03391328454017639\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0000367164611816 0.00340519892051816\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0001465243985876441 0.003044768702238798\n",
      "relation_encoders.2.layers.0.norm2.weight 1.000069499015808 0.0030343029648065567\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00011858053039759398 0.0033413576893508434\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.000396578514482826 0.08894749730825424\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00017777940956875682 0.0022931729909032583\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.001075394102372229 0.12430253624916077\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00039438001113012433 0.0026570516638457775\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019445261568762362 0.07936953008174896\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0004976489581167698 0.06868748366832733\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00015632002032361925 0.0792979747056961\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009145503863692284 0.033329594880342484\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999746084213257 0.002639713464304805\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00015119323506951332 0.0028925531078130007\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001132488250732 0.002888315124437213\n",
      "relation_encoders.2.layers.1.norm2.bias -4.209410690236837e-06 0.0028387352358549833\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004613112541846931 0.08849557489156723\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 1.6598383808741346e-05 0.002330723451450467\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 3.5888340789824724e-05 0.12506406009197235\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.00036857379018329084 0.002898197388276458\n",
      "relation_encoders.2.layers.2.linear1.weight 9.031605441123247e-05 0.07905976474285126\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0005162471206858754 0.0687330812215805\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0004676069365814328 0.07940603792667389\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009141100570559502 0.033108584582805634\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002567768096924 0.002591957338154316\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00013012479757890105 0.0029064365662634373\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9987802505493164 0.002869398333132267\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0002894351491704583 0.0016285671154037118\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 005, BSNE Loss: 0.9215\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.20710167412737757,global_loss: 0.6923961043357849\n",
      "local_loss: 0.3384306206263221,global_loss: 0.5235993266105652\n",
      "local_loss: 0.2569647214140769,global_loss: 1.0475528240203857\n",
      "local_loss: 0.2727229424543655,global_loss: 0.44006770849227905\n",
      "local_loss: 0.31372380841340564,global_loss: 1.148079752922058\n",
      "local_loss: 0.4408056662159113,global_loss: 0.3322528600692749\n",
      "local_loss: 0.26950642149796655,global_loss: 0.67034512758255\n",
      "local_loss: 0.23230860288509306,global_loss: 0.5397438406944275\n",
      "feature_proj.0.weight 0.0013376650167629123 0.15095525979995728\n",
      "feature_proj.0.bias 0.019368024542927742 0.1081799864768982\n",
      "feature_proj.1.weight 1.0003396272659302 0.003113332437351346\n",
      "feature_proj.1.bias 0.0002873661578632891 0.002945277141407132\n",
      "norm.weight 1.0001221895217896 0.0031883029732853174\n",
      "norm.bias 0.00010761027806438506 0.003064740216359496\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006741102552041411 0.08821947127580643\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -6.966976070543751e-05 0.0027198572643101215\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009664699900895357 0.12494765222072601\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00021595660655293614 0.0031137519981712103\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00014200349687598646 0.07914076745510101\n",
      "relation_encoders.0.layers.0.linear1.bias -0.0059903268702328205 0.07284797728061676\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00037895937566645443 0.07877698540687561\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009298435412347317 0.034813422709703445\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999617338180542 0.0033276339527219534\n",
      "relation_encoders.0.layers.0.norm1.bias -7.150748569983989e-05 0.0027544633485376835\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997169971466064 0.0029315766878426075\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00014032366743776947 0.0028885570354759693\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.0001617336820345372 0.08819885551929474\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 5.066573430667631e-05 0.002325473353266716\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014190345536917448 0.12538540363311768\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0001998425432248041 0.0030660119373351336\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007673419313505292 0.07914717495441437\n",
      "relation_encoders.0.layers.1.linear1.bias -0.00592834921553731 0.07307067513465881\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00040616755723021924 0.07926090806722641\n",
      "relation_encoders.0.layers.1.linear2.bias -0.00936422124505043 0.03404375910758972\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9998761415481567 0.00297203054651618\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0005971611244603992 0.002999166026711464\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000357627868652 0.002955509116873145\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00015384833386633545 0.0030685204546898603\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.2543061277247034e-05 0.08802969008684158\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 3.958259185310453e-05 0.0022806681226938963\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0027616112492978573 0.1250806301832199\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.00021118359290994704 0.0030747023411095142\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006360581610351801 0.07872258871793747\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005859394557774067 0.07324725389480591\n",
      "relation_encoders.0.layers.2.linear2.weight -7.883474609116092e-05 0.07879067212343216\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008950484916567802 0.034929852932691574\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998852014541626 0.002809202065691352\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00029876630287617445 0.002988973865285516\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9979776740074158 0.0026592963840812445\n",
      "relation_encoders.0.layers.2.norm2.bias 0.00012527512444648892 0.0020726178772747517\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00031497207237407565 0.08825722336769104\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0002457246882840991 0.002643098123371601\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004583350964821875 0.12418853491544724\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.000307071371935308 0.0031924457289278507\n",
      "relation_encoders.1.layers.0.linear1.weight 2.9579998226836324e-05 0.0791088119149208\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002290792763233185 0.07185226678848267\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003678167413454503 0.07921727001667023\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002256646752357483 0.037918563932180405\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0001370906829834 0.0031628748401999474\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0006902972236275673 0.003405720693990588\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997686147689819 0.0032375419978052378\n",
      "relation_encoders.1.layers.0.norm2.bias 0.00013747712364420295 0.003468977753072977\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00035173463402315974 0.0894811600446701\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.0002424295962555334 0.002928375732153654\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032368870452046394 0.12487436830997467\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.00033631533733569086 0.0035692579112946987\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011200426379218698 0.07887203246355057\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0025303373113274574 0.07171917706727982\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004994822666049004 0.07872313261032104\n",
      "relation_encoders.1.layers.1.linear2.bias -0.002505299635231495 0.03781719505786896\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999945163726807 0.003302999772131443\n",
      "relation_encoders.1.layers.1.norm1.bias -0.0002934828517027199 0.003289119340479374\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003350973129272 0.0031226137652993202\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00046178727643564343 0.0031219369266182184\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016580479859840125 0.08853811770677567\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 3.877888957504183e-05 0.0025846532080322504\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00027472636429592967 0.1246216744184494\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.00040533544961363077 0.0032166193705052137\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010018233442679048 0.07949305325746536\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002643151208758354 0.07230905443429947\n",
      "relation_encoders.1.layers.2.linear2.weight -0.0001368560770060867 0.07949504256248474\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0024694320745766163 0.03819052129983902\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003383159637451 0.0031014808919280767\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0002156775735784322 0.00318374065682292\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9984225034713745 0.0029565172735601664\n",
      "relation_encoders.1.layers.2.norm2.bias 9.23566403798759e-06 0.0017893667099997401\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004951617447659373 0.0885450541973114\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -6.511125684482977e-05 0.0024799304082989693\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002967371139675379 0.1257297694683075\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.0001490260474383831 0.0035080653615295887\n",
      "relation_encoders.2.layers.0.linear1.weight -3.713656042236835e-05 0.07941897958517075\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0008658128790557384 0.06894350796937943\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00020818616030737758 0.07877882570028305\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008745084516704082 0.03402245044708252\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0000731945037842 0.003732958808541298\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0002197417343268171 0.003354117274284363\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000590085983276 0.003292737528681755\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00012472065282054245 0.0036790885496884584\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.00039538840064778924 0.08894824981689453\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00020874402252957225 0.0024994234554469585\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010733288945630193 0.12430864572525024\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00040414038812741637 0.002904006978496909\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019395234994590282 0.07937712967395782\n",
      "relation_encoders.2.layers.1.linear1.bias -0.00048282090574502945 0.06870434433221817\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00017500342801213264 0.07930947840213776\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009115200489759445 0.03341040760278702\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999638199806213 0.002887973329052329\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00014196238771546632 0.0031597150955349207\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001122951507568 0.003195186611264944\n",
      "relation_encoders.2.layers.1.norm2.bias -5.9631689509842545e-05 0.0031865767668932676\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004629571922123432 0.08850154280662537\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 3.885247133439407e-05 0.0025122128427028656\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 3.910379018634558e-05 0.12506818771362305\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.00043233996257185936 0.0031619465444236994\n",
      "relation_encoders.2.layers.2.linear1.weight 9.190745186060667e-05 0.07906791567802429\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004971572197973728 0.06874118745326996\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00046163314254954457 0.0794157162308693\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009152310900390148 0.03312258422374725\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002553462982178 0.0028624129481613636\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00014942765119485557 0.003229888156056404\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9985710382461548 0.003076971974223852\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0003265072882641107 0.001783429179340601\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 006, BSNE Loss: 0.9657\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.613509078534179,global_loss: 0.2769501507282257\n",
      "local_loss: 0.43992502843742687,global_loss: 0.889139711856842\n",
      "local_loss: 0.22482483902350744,global_loss: 1.2872188091278076\n",
      "local_loss: 0.499227534816833,global_loss: 1.0544975996017456\n",
      "local_loss: 0.3594636458198602,global_loss: 0.7118539810180664\n",
      "local_loss: 0.27598275685894375,global_loss: 0.8747316002845764\n",
      "local_loss: 0.3212553819187652,global_loss: 0.6163344383239746\n",
      "local_loss: 0.2699050097148768,global_loss: 1.1339693069458008\n",
      "feature_proj.0.weight 0.0012347218580543995 0.1509934961795807\n",
      "feature_proj.0.bias 0.019271202385425568 0.10828763991594315\n",
      "feature_proj.1.weight 1.0003646612167358 0.0033925080206245184\n",
      "feature_proj.1.bias 0.00032530867611058056 0.0032183020375669003\n",
      "norm.weight 1.0001490116119385 0.0034482269547879696\n",
      "norm.bias 0.0001041681389324367 0.003352872096002102\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006748678861185908 0.08823182433843613\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -5.0088226998923346e-05 0.0029619073029607534\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009598479373380542 0.12494684010744095\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00025768522755242884 0.0033956465777009726\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00013936040340922773 0.07914605736732483\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006024707108736038 0.07286065816879272\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00037107663229107857 0.0787808820605278\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009287819266319275 0.034935303032398224\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999508261680603 0.0035967393778264523\n",
      "relation_encoders.0.layers.0.norm1.bias -0.00010230503539787605 0.002978170057758689\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997113943099976 0.0031594978645443916\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00018855169764719903 0.0031795513350516558\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00016001984477043152 0.08821075409650803\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 2.4564709747210145e-05 0.0025134803727269173\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014236140996217728 0.12539558112621307\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0002543344453442842 0.003356530796736479\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007683828007429838 0.07915418595075607\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005933484528213739 0.07306903600692749\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0004138420335948467 0.0792652815580368\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009384957142174244 0.03405744209885597\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9998508095741272 0.0031435908749699593\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0006319563835859299 0.003293171990662813\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000444650650024 0.003207041881978512\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00019100416102446616 0.0033291743602603674\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.3750832042424008e-05 0.0880398154258728\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 5.736811363021843e-05 0.002430555410683155\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002756591187790036 0.1250760853290558\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.00028169029974378645 0.003287968225777149\n",
      "relation_encoders.0.layers.2.linear1.weight -0.000637049088254571 0.07873474061489105\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005836972035467625 0.07325578480958939\n",
      "relation_encoders.0.layers.2.linear2.weight -6.458156713051721e-05 0.07880257815122604\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008908282965421677 0.03497233986854553\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998727440834045 0.0030796779319643974\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00027654299628920853 0.0031975286547094584\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9976921677589417 0.0027896782848984003\n",
      "relation_encoders.0.layers.2.norm2.bias 0.00010972503514494747 0.0022129924036562443\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00030335885821841657 0.08827299624681473\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0002971854410134256 0.002849062904715538\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.000456452980870381 0.12420310825109482\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.000338361831381917 0.0034541445784270763\n",
      "relation_encoders.1.layers.0.linear1.weight 3.2268144423142076e-05 0.0791182592511177\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0022581981029361486 0.07185021787881851\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00036890452611260116 0.0792262926697731\n",
      "relation_encoders.1.layers.0.linear2.bias -0.00226355018094182 0.03790389746427536\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0001327991485596 0.003387852106243372\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0007612627232447267 0.003706523682922125\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997444152832031 0.003500773338600993\n",
      "relation_encoders.1.layers.0.norm2.bias 0.00013933151785749942 0.003757450496777892\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00035021890653297305 0.08949214220046997\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.000275472120847553 0.003155433339998126\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003236602758988738 0.12487874180078506\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.00038097219658084214 0.0038011865690350533\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011191172525286674 0.07887940108776093\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0025093278381973505 0.07171908766031265\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004826519580092281 0.07872907817363739\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0025374575052410364 0.03780882805585861\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999778866767883 0.003564653219655156\n",
      "relation_encoders.1.layers.1.norm1.bias -0.00031494535505771637 0.0034669649321585894\n",
      "relation_encoders.1.layers.1.norm2.weight 1.000321388244629 0.0033289159182459116\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00048659610911272466 0.0033452115021646023\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016719207633286715 0.08854132145643234\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 4.581499888445251e-05 0.0027597129810601473\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00027194886934012175 0.12462949752807617\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.0003194730670657009 0.0034060641191899776\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010011971462517977 0.07950331270694733\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0026343311183154583 0.07231342792510986\n",
      "relation_encoders.1.layers.2.linear2.weight -0.0001349499507341534 0.07950504124164581\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0024813066702336073 0.03825688362121582\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003464221954346 0.0032989620231091976\n",
      "relation_encoders.1.layers.2.norm1.bias -0.00024660988128744066 0.0034240512177348137\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9981940984725952 0.0031032694969326258\n",
      "relation_encoders.1.layers.2.norm2.bias 5.993929516989738e-05 0.001931740902364254\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.000499205372761935 0.08855220675468445\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -7.2044029366225e-05 0.002662092447280884\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002962264232337475 0.1257294863462448\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00017433443281333894 0.0037865247577428818\n",
      "relation_encoders.2.layers.0.linear1.weight -3.509188536554575e-05 0.07942858338356018\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0008863317780196667 0.0689416453242302\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00019828989752568305 0.0787881538271904\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008711593225598335 0.03406815975904465\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0000908374786377 0.004031674470752478\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0002766809775494039 0.0036170578096061945\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000545978546143 0.0035131177864968777\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00010358988947700709 0.003922480624169111\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003942096955142915 0.08895058184862137\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00022468110546469688 0.0026575687807053328\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010735795367509127 0.12431091070175171\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0004188625025562942 0.003149020718410611\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019339038408361375 0.07938579469919205\n",
      "relation_encoders.2.layers.1.linear1.bias -0.00046572566498070955 0.06871826946735382\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00019144194084219635 0.07932089269161224\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009089491330087185 0.03348344936966896\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999539852142334 0.0030942605808377266\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00014854827895760536 0.003377993358299136\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001115798950195 0.0034652003087103367\n",
      "relation_encoders.2.layers.1.norm2.bias -0.00012514923582784832 0.003446184331551194\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004648040048778057 0.08850662410259247\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 7.290244684554636e-05 0.00268867751583457\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 4.141224781051278e-05 0.125069260597229\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.000486529606860131 0.003400309244170785\n",
      "relation_encoders.2.layers.2.linear1.weight 9.363505523651838e-05 0.07907610386610031\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004905280657112598 0.06875697523355484\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0004483531811274588 0.07942359894514084\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009181495755910873 0.03313467279076576\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002614259719849 0.0031142630614340305\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00017532194033265114 0.0035244051832705736\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9983497858047485 0.0033029632177203894\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0003459531581029296 0.0019153947941958904\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 007, BSNE Loss: 1.2311\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.33949902845006225,global_loss: 0.9418428540229797\n",
      "local_loss: 0.2834578645441001,global_loss: 0.4854569733142853\n",
      "local_loss: 0.42204911778143966,global_loss: 0.586479663848877\n",
      "local_loss: 0.24526893898763177,global_loss: 0.29933810234069824\n",
      "local_loss: 0.7239597380121081,global_loss: 0.544664740562439\n",
      "local_loss: 0.14728030561087843,global_loss: 1.420450210571289\n",
      "local_loss: 0.15460636012926882,global_loss: 1.1727628707885742\n",
      "local_loss: 0.16319120588473499,global_loss: 1.3222237825393677\n",
      "feature_proj.0.weight 0.0011566919274628162 0.151020348072052\n",
      "feature_proj.0.bias 0.01919071190059185 0.10840055346488953\n",
      "feature_proj.1.weight 1.0003862380981445 0.003644673153758049\n",
      "feature_proj.1.bias 0.00034326850436627865 0.003487309440970421\n",
      "norm.weight 1.000178575515747 0.003697402309626341\n",
      "norm.bias 9.54596180235967e-05 0.0036179618909955025\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006729478482156992 0.08824533224105835\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -4.330325100454502e-05 0.0031842452008277178\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009524665074422956 0.1249467134475708\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.0002861869288608432 0.003664611838757992\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00013579943333752453 0.07915076613426208\n",
      "relation_encoders.0.layers.0.linear1.bias -0.0060635823756456375 0.0728641003370285\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003638817579485476 0.07878502458333969\n",
      "relation_encoders.0.layers.0.linear2.bias -0.0092767383903265 0.03500989452004433\n",
      "relation_encoders.0.layers.0.norm1.weight 0.999935507774353 0.0038552735932171345\n",
      "relation_encoders.0.layers.0.norm1.bias -0.0001318536524195224 0.0031977712642401457\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9997166991233826 0.003378500696271658\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00024419758119620383 0.0034392604138702154\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015814979269634932 0.08822344243526459\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -6.589174063265091e-06 0.0026989667676389217\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014271615073084831 0.12540680170059204\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.00025422050384804606 0.003613849403336644\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007700857240706682 0.07915893942117691\n",
      "relation_encoders.0.layers.1.linear1.bias -0.005962466821074486 0.07306452840566635\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0004117622156627476 0.0792677104473114\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009387114085257053 0.03406725451350212\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9998311400413513 0.0032991785556077957\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0006731580942869186 0.0035383859649300575\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000590085983276 0.0034571997821331024\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00018580316100269556 0.003574429312720895\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.4531405870220624e-05 0.08804985880851746\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 7.418846507789567e-05 0.0025743995793163776\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0027521129231899977 0.12507498264312744\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.00033764896215870976 0.0034823317546397448\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006377575919032097 0.07874580472707748\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005812857300043106 0.07325546443462372\n",
      "relation_encoders.0.layers.2.linear2.weight -4.406767402542755e-05 0.07881316542625427\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008847594261169434 0.03503041714429855\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998719692230225 0.003292422741651535\n",
      "relation_encoders.0.layers.2.norm1.bias 0.0002646936336532235 0.003393657738342881\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9974018335342407 0.0029166569001972675\n",
      "relation_encoders.0.layers.2.norm2.bias 9.537868027109653e-05 0.0023500348906964064\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00029340511537157 0.08829020708799362\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0003405421448405832 0.003057163907214999\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.00045691741979680955 0.12421897053718567\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0003867560299113393 0.0037004034966230392\n",
      "relation_encoders.1.layers.0.linear1.weight 3.4195109037682414e-05 0.07912848144769669\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002227747580036521 0.07184457033872604\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003737473161891103 0.07923529297113419\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002282414585351944 0.03789106011390686\n",
      "relation_encoders.1.layers.0.norm1.weight 1.000135064125061 0.0036151744425296783\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0008126054308377206 0.0039610243402421474\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997292160987854 0.003714479273185134\n",
      "relation_encoders.1.layers.0.norm2.bias 0.00012309112935326993 0.003988411743193865\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00034805352333933115 0.08950264751911163\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.0002980969147756696 0.003352532861754298\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003236945252865553 0.12487979233264923\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.00043291255133226514 0.0040071154944598675\n",
      "relation_encoders.1.layers.1.linear1.weight -0.001117961946874857 0.0788855254650116\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0024796421639621258 0.07171809673309326\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004672602517530322 0.07873468101024628\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0025645168498158455 0.03777943179011345\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999655485153198 0.0037948000244796276\n",
      "relation_encoders.1.layers.1.norm1.bias -0.00033996516140177846 0.0036073375958949327\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003219842910767 0.0035300899762660265\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00046199746429920197 0.0035261756274849176\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016726511239539832 0.08854462951421738\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 6.784745346521959e-05 0.002918032929301262\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00026723078917711973 0.12463858723640442\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.00023585330927744508 0.003559534903615713\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0010003515053540468 0.07951392978429794\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002621022518724203 0.07233094424009323\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00012878331472165883 0.07951488345861435\n",
      "relation_encoders.1.layers.2.linear2.bias -0.00247459439560771 0.03831404075026512\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003613233566284 0.0034995197784155607\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0002682964550331235 0.003635954577475786\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9979664087295532 0.0032434873282909393\n",
      "relation_encoders.1.layers.2.norm2.bias 0.0001086090414901264 0.0020622292067855597\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.000501906150020659 0.08855897933244705\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -6.932578253326938e-05 0.002831790130585432\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029595126397907734 0.12572813034057617\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00018744055705610663 0.0040201651863753796\n",
      "relation_encoders.2.layers.0.linear1.weight -3.285457205493003e-05 0.07943803817033768\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0009151072008535266 0.06893985718488693\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00018370785983279347 0.07879713177680969\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008668863214552402 0.034104250371456146\n",
      "relation_encoders.2.layers.0.norm1.weight 1.000106692314148 0.004274603445082903\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0003294586786068976 0.0038479873910546303\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000495910644531 0.0037084436044096947\n",
      "relation_encoders.2.layers.0.norm2.bias 9.01126186363399e-05 0.004135219845920801\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.00039280945202335715 0.0889502465724945\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00023085442080628127 0.0028109869454056025\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010735690593719482 0.1243111863732338\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0004318371065892279 0.003370164195075631\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019265356240794063 0.07939591258764267\n",
      "relation_encoders.2.layers.1.linear1.bias -0.00043471145909279585 0.06871861964464188\n",
      "relation_encoders.2.layers.1.linear2.weight -0.000212689716136083 0.07933151721954346\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009045727550983429 0.03356687352061272\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999547004699707 0.0032616264652460814\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00015742104733362794 0.0035670858342200518\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001105070114136 0.0037258556112647057\n",
      "relation_encoders.2.layers.1.norm2.bias -0.00017331389244645834 0.0036823099944740534\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.00046643087989650667 0.08851154893636703\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.00010880597255891189 0.0028451518155634403\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 4.248338518664241e-05 0.1250653862953186\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0005158850108273327 0.0036203309427946806\n",
      "relation_encoders.2.layers.2.linear1.weight 9.482621680945158e-05 0.07908310741186142\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004928149282932281 0.06878042221069336\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0004268896300345659 0.07943087071180344\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009233659133315086 0.03317290544509888\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002690553665161 0.0033670023549348116\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00018301530508324504 0.0038045041728764772\n",
      "relation_encoders.2.layers.2.norm2.weight 0.998134434223175 0.003503374056890607\n",
      "relation_encoders.2.layers.2.norm2.bias 0.00037873582914471626 0.0020524205174297094\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 008, BSNE Loss: 1.1566\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.21274660531939604,global_loss: 0.9186681509017944\n",
      "local_loss: 0.3634331971519674,global_loss: 0.4435974955558777\n",
      "local_loss: 0.20827086458481447,global_loss: 1.1562366485595703\n",
      "local_loss: 0.3093904780414203,global_loss: 1.0068225860595703\n",
      "local_loss: 0.10795134910833319,global_loss: 0.7774677872657776\n",
      "local_loss: 0.43828953054536285,global_loss: -0.025091616436839104\n",
      "local_loss: 0.23510206929456023,global_loss: 0.7019373178482056\n",
      "local_loss: 0.2891446056127548,global_loss: 0.6487125158309937\n",
      "feature_proj.0.weight 0.0010687046451494098 0.1510511189699173\n",
      "feature_proj.0.bias 0.019113896414637566 0.10849565267562866\n",
      "feature_proj.1.weight 1.0004143714904785 0.0038573455531150103\n",
      "feature_proj.1.bias 0.00038362425402738154 0.0037212257739156485\n",
      "norm.weight 1.0002045631408691 0.003924866672605276\n",
      "norm.bias 8.862646063789725e-05 0.003853695699945092\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006676572374999523 0.08825705200433731\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -5.1015882490901276e-05 0.0033809905871748924\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009435853571631014 0.12494777143001556\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00031969352858141065 0.003933641593903303\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00013163487892597914 0.07915481925010681\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006107587367296219 0.07286995649337769\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003605927631724626 0.07878778129816055\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009277760982513428 0.035084955394268036\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999159574508667 0.004110349807888269\n",
      "relation_encoders.0.layers.0.norm1.bias -0.0001377189182676375 0.003419505199417472\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9996986389160156 0.0036066058091819286\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00030965780024416745 0.0036572590470314026\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015704186807852238 0.08823307603597641\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -3.594195004552603e-05 0.0028657661750912666\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.001428682473488152 0.1254187375307083\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0002559701388236135 0.0038336433935910463\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007717527914792299 0.07916320115327835\n",
      "relation_encoders.0.layers.1.linear1.bias -0.006002307869493961 0.0730685293674469\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0004106874403078109 0.07926908135414124\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009396164678037167 0.03405041992664337\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9998092651367188 0.003432472236454487\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0007115785265341401 0.0037333222571760416\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000649690628052 0.0036955398973077536\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00019945012172684073 0.0037915310822427273\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.6131709091714583e-05 0.0880596712231636\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.833581523504108e-05 0.00273318774998188\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0027447734028100967 0.12507279217243195\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.0003989123797509819 0.0036567742936313152\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006384062580764294 0.07875646650791168\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005788033828139305 0.07325201481580734\n",
      "relation_encoders.0.layers.2.linear2.weight -2.1792860934510827e-05 0.07882320135831833\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008791238069534302 0.03505224734544754\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998713731765747 0.00349832559004426\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00025192557950504124 0.003565592225641012\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9971358180046082 0.003076189197599888\n",
      "relation_encoders.0.layers.2.norm2.bias 8.081726264208555e-05 0.002479539019986987\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.0002866940922103822 0.08830514550209045\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0003651615115813911 0.0032363738864660263\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004567920113913715 0.12423217296600342\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0004333558026701212 0.003930971957743168\n",
      "relation_encoders.1.layers.0.linear1.weight 3.701963578350842e-05 0.0791376382112503\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002192176878452301 0.07184585183858871\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003777063393499702 0.07924305647611618\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002306276001036167 0.037905413657426834\n",
      "relation_encoders.1.layers.0.norm1.weight 1.0001354217529297 0.003815230680629611\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0008585223695263267 0.004187817685306072\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997188448905945 0.00391489639878273\n",
      "relation_encoders.1.layers.0.norm2.bias 0.00010658886458259076 0.0042267125099897385\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0003457979764789343 0.08951374888420105\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.0003149081894662231 0.003540249075740576\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032376027666032314 0.12488266080617905\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0004838451568502933 0.00420205295085907\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011174294631928205 0.07889318466186523\n",
      "relation_encoders.1.layers.1.linear1.bias 0.002453342778608203 0.07172300666570663\n",
      "relation_encoders.1.layers.1.linear2.weight 0.00045113713713362813 0.07874137163162231\n",
      "relation_encoders.1.layers.1.linear2.bias -0.00259932572953403 0.03776538744568825\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999578595161438 0.004010154400020838\n",
      "relation_encoders.1.layers.1.norm1.bias -0.0003403037553653121 0.0037251024041324854\n",
      "relation_encoders.1.layers.1.norm2.weight 1.000319480895996 0.003697809297591448\n",
      "relation_encoders.1.layers.1.norm2.bias -0.0004273633530829102 0.0036491514183580875\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016670329205226153 0.08854860067367554\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 9.65389481279999e-05 0.0030746799893677235\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002619823790155351 0.12464484572410583\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 0.00015133499982766807 0.003696775296702981\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0009999467292800546 0.07952355593442917\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0026185312308371067 0.07234378904104233\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00013078073970973492 0.0795242115855217\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002493523992598057 0.03837068751454353\n",
      "relation_encoders.1.layers.2.norm1.weight 1.000373363494873 0.003673013299703598\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0002752215077634901 0.003824961371719837\n",
      "relation_encoders.1.layers.2.norm2.weight 0.997748076915741 0.003369900630787015\n",
      "relation_encoders.1.layers.2.norm2.bias 0.00014038382505532354 0.002204031916335225\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0005014392081648111 0.08856608718633652\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -6.043040411896072e-05 0.0030130401719361544\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029561438132077456 0.1257294863462448\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00019157156930305064 0.004248600918799639\n",
      "relation_encoders.2.layers.0.linear1.weight -2.9965274734422565e-05 0.07944772392511368\n",
      "relation_encoders.2.layers.0.linear1.bias -0.00094794319011271 0.06893383711576462\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00017140604904852808 0.07880645245313644\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008632179349660873 0.034117717295885086\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0001542568206787 0.004493726883083582\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0003843457670882344 0.004059295170009136\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000529289245605 0.0038805240765213966\n",
      "relation_encoders.2.layers.0.norm2.bias 9.50106477830559e-05 0.004317778162658215\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003925116907339543 0.08895357698202133\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.0002323214866919443 0.00296997232362628\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010740731377154589 0.12431138753890991\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0004331385425757617 0.003573830472305417\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019200847600586712 0.07940589636564255\n",
      "relation_encoders.2.layers.1.linear1.bias -0.00040728074964135885 0.06871901452541351\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00022113093291409314 0.07934123277664185\n",
      "relation_encoders.2.layers.1.linear2.bias 0.009035047143697739 0.033621400594711304\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999527931213379 0.003409731900319457\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00015626693493686616 0.003752009943127632\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001044273376465 0.003961475100368261\n",
      "relation_encoders.2.layers.1.norm2.bias -0.00019740499556064606 0.003912151791155338\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004680361016653478 0.08851362764835358\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.00014006084529682994 0.002999724354594946\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 4.6219269279390574e-05 0.12505994737148285\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0005514667136594653 0.003842306789010763\n",
      "relation_encoders.2.layers.2.linear1.weight 9.58156306296587e-05 0.07908977568149567\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004917642800137401 0.06879176199436188\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00040130122215487063 0.07943778485059738\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009294012561440468 0.03319317102432251\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0002906322479248 0.0035919088404625654\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00017816279432736337 0.00406525656580925\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9979195594787598 0.0037185419350862503\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0004165812861174345 0.002182634314522147\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 009, BSNE Loss: 0.9741\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.4753040691633775,global_loss: 0.19629605114459991\n",
      "local_loss: 0.4831882619973511,global_loss: 0.35614103078842163\n",
      "local_loss: 0.13107668768583094,global_loss: 0.7585963010787964\n",
      "local_loss: 0.22543309996961938,global_loss: 0.8779683709144592\n",
      "local_loss: 0.23123617117884088,global_loss: 0.5965922474861145\n",
      "local_loss: 0.4363421149229563,global_loss: 0.47525396943092346\n",
      "local_loss: 0.1274046626012548,global_loss: 1.0036695003509521\n",
      "local_loss: 0.30041515172760397,global_loss: 0.9026778936386108\n",
      "feature_proj.0.weight 0.0009861704893410206 0.15107741951942444\n",
      "feature_proj.0.bias 0.019042257219552994 0.10859644412994385\n",
      "feature_proj.1.weight 1.0004401206970215 0.004047834314405918\n",
      "feature_proj.1.bias 0.00040793255902826786 0.003938664682209492\n",
      "norm.weight 1.000233769416809 0.0041179778054356575\n",
      "norm.bias 7.768326031509787e-05 0.0040961443446576595\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006655865581706166 0.08826754242181778\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -4.623515997081995e-05 0.0035791918635368347\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.000937251781579107 0.12495078146457672\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.0003557134768925607 0.00418405095115304\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00012771232286468148 0.07915962487459183\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006155109032988548 0.07287025451660156\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003519484307616949 0.07878907769918442\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009267250075936317 0.03514925017952919\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999071359634399 0.004341372288763523\n",
      "relation_encoders.0.layers.0.norm1.bias -0.00015244653332047164 0.0036241409834474325\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9996781349182129 0.003824452171102166\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00035159022081643343 0.0038610948249697685\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015660433564335108 0.08824256807565689\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -4.8806316044647247e-05 0.003013345180079341\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.001429736614227295 0.12543104588985443\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0002639167942106724 0.004031142685562372\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007727432530373335 0.07916804403066635\n",
      "relation_encoders.0.layers.1.linear1.bias -0.0060428716242313385 0.07308276742696762\n",
      "relation_encoders.0.layers.1.linear2.weight -0.0004011890268884599 0.0792698934674263\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009383034892380238 0.03402802720665932\n",
      "relation_encoders.0.layers.1.norm1.weight 0.999799370765686 0.0035531611647456884\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0007390370010398328 0.0039015011861920357\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000755786895752 0.0039021139964461327\n",
      "relation_encoders.0.layers.1.norm2.bias 0.0002152256784029305 0.003990354016423225\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.768124613794498e-05 0.08806687593460083\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.724622603040189e-05 0.002873774617910385\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.0027370844036340714 0.1250683218240738\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.0004686788306571543 0.003834423841908574\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006393385701812804 0.07876592874526978\n",
      "relation_encoders.0.layers.2.linear1.bias -0.00576375424861908 0.07326870411634445\n",
      "relation_encoders.0.layers.2.linear2.weight -8.489107131026685e-06 0.07883244007825851\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008759996853768826 0.03508451208472252\n",
      "relation_encoders.0.layers.2.norm1.weight 0.999870240688324 0.003668382065370679\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00023718408192507923 0.0037220099475234747\n",
      "relation_encoders.0.layers.2.norm2.weight 0.996855616569519 0.0032198335975408554\n",
      "relation_encoders.0.layers.2.norm2.bias 6.036536069586873e-05 0.002598124323412776\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.000280656706308946 0.08831897377967834\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0003837206168100238 0.0034176758490502834\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.00045645839418284595 0.12424514442682266\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.00046401339932344854 0.004146389197558165\n",
      "relation_encoders.1.layers.0.linear1.weight 3.944654599763453e-05 0.0791463553905487\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0021616658195853233 0.07185057550668716\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00037716206861659884 0.07925042510032654\n",
      "relation_encoders.1.layers.0.linear2.bias -0.0023218621499836445 0.03792230412364006\n",
      "relation_encoders.1.layers.0.norm1.weight 1.00014066696167 0.004025375936180353\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0009022928425110877 0.00439525255933404\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997144341468811 0.004099821671843529\n",
      "relation_encoders.1.layers.0.norm2.bias 8.436522330157459e-05 0.004439139273017645\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00034384155878797174 0.08952387422323227\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.00032858637860044837 0.003722723573446274\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032402342185378075 0.12488307803869247\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0005418782820925117 0.004395098425447941\n",
      "relation_encoders.1.layers.1.linear1.weight -0.001116857398301363 0.07889995723962784\n",
      "relation_encoders.1.layers.1.linear1.bias 0.002423192374408245 0.07171817123889923\n",
      "relation_encoders.1.layers.1.linear2.weight 0.00043548858957365155 0.07874816656112671\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0026327650994062424 0.03774566948413849\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999496340751648 0.004188808146864176\n",
      "relation_encoders.1.layers.1.norm1.bias -0.0003438711282797158 0.0038552014157176018\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003174543380737 0.00384537223726511\n",
      "relation_encoders.1.layers.1.norm2.bias -0.000384170503821224 0.0037764180451631546\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016712473006919026 0.08855128288269043\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 0.0001178774909931235 0.0032186643220484257\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00025871535763144493 0.12465271353721619\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias 5.5809825425967574e-05 0.003818628378212452\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0009996573207899928 0.0795314759016037\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002601130399852991 0.0723641887307167\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00012812486966140568 0.07953142374753952\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002502787159755826 0.03840962424874306\n",
      "relation_encoders.1.layers.2.norm1.weight 1.00038480758667 0.0038572398480027914\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0003029243089258671 0.003993412014096975\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9975311756134033 0.0034999470226466656\n",
      "relation_encoders.1.layers.2.norm2.bias 0.0001440062769688666 0.0023402704391628504\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0005005456623621285 0.08857439458370209\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -4.9796304665505886e-05 0.003208210226148367\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029544737190008163 0.12573252618312836\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00018725378322415054 0.004475628025829792\n",
      "relation_encoders.2.layers.0.linear1.weight -2.6846653781831264e-05 0.07945754379034042\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0009811475174501538 0.06892814487218857\n",
      "relation_encoders.2.layers.0.linear2.weight 0.0001645158918108791 0.07881561666727066\n",
      "relation_encoders.2.layers.0.linear2.bias 0.00860810186713934 0.034134116023778915\n",
      "relation_encoders.2.layers.0.norm1.weight 1.000195860862732 0.004691706504672766\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0004169473540969193 0.004275817424058914\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000633001327515 0.0040349168702960014\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00010622170520946383 0.0045018624514341354\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.00039212260162457824 0.0889582633972168\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.0002333549491595477 0.0031198314391076565\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010747960768640041 0.12431149184703827\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0004112239694222808 0.003809418762102723\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019225204596295953 0.07941465824842453\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0003918405855074525 0.06870691478252411\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00023190751380752772 0.07934948056936264\n",
      "relation_encoders.2.layers.1.linear2.bias 0.00901365838944912 0.0336754284799099\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999502301216125 0.003567314939573407\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00013973013847135007 0.003933747299015522\n",
      "relation_encoders.2.layers.1.norm2.weight 1.000110387802124 0.0041817789897322655\n",
      "relation_encoders.2.layers.1.norm2.bias -0.0002259285975014791 0.004139794036746025\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004691992362495512 0.08851806819438934\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.00016889335529413074 0.003156059654429555\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 5.111930659040809e-05 0.1250590831041336\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0005895302165299654 0.004051306750625372\n",
      "relation_encoders.2.layers.2.linear1.weight 9.608728578314185e-05 0.0790969580411911\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004925725515931845 0.06880109757184982\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00037978749605827034 0.07944490760564804\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009337488561868668 0.03321434557437897\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0003148317337036 0.0037851394154131413\n",
      "relation_encoders.2.layers.2.norm1.bias -0.0001942854723893106 0.004313246347010136\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9977110028266907 0.003920980729162693\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0004663962172344327 0.002309098606929183\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 010, BSNE Loss: 0.9472\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.34857267728161573,global_loss: 1.0423550605773926\n",
      "local_loss: 0.29750104183104303,global_loss: 1.060499906539917\n",
      "local_loss: 0.5267058119641014,global_loss: 0.28736448287963867\n",
      "local_loss: 0.2201397416022505,global_loss: 0.6307422518730164\n",
      "local_loss: 0.27139109256595634,global_loss: 1.1352585554122925\n",
      "local_loss: 0.5767770926737985,global_loss: 0.242280513048172\n",
      "local_loss: 0.44185984168277775,global_loss: 0.4511605501174927\n",
      "local_loss: 0.46730320177240287,global_loss: -0.01596430502831936\n",
      "feature_proj.0.weight 0.0009021717123687267 0.15110760927200317\n",
      "feature_proj.0.bias 0.018962206318974495 0.10868249088525772\n",
      "feature_proj.1.weight 1.0004597902297974 0.004235635977238417\n",
      "feature_proj.1.bias 0.00043537819874472916 0.004156625363975763\n",
      "norm.weight 1.0002515316009521 0.004302320070564747\n",
      "norm.bias 6.0596008552238345e-05 0.004335700999945402\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006663619424216449 0.08827588707208633\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -3.6492638173513114e-05 0.0037562442012131214\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009386201272718608 0.12495443224906921\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.000376374926418066 0.004395726136863232\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00012457251432351768 0.07916504144668579\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006194462068378925 0.0728771835565567\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003444202011451125 0.07879208773374557\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009254018776118755 0.03519930690526962\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999034404754639 0.00454346789047122\n",
      "relation_encoders.0.layers.0.norm1.bias -0.00016386411152780056 0.00378907797858119\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9996667504310608 0.004042922984808683\n",
      "relation_encoders.0.layers.0.norm2.bias 0.00039398815715685487 0.004060627426952124\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015638202603440732 0.0882541686296463\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -5.809150025015697e-05 0.003161404049023986\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014300842303782701 0.125443235039711\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.0002853909390978515 0.004232947714626789\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007735824910923839 0.07917380332946777\n",
      "relation_encoders.0.layers.1.linear1.bias -0.006079184822738171 0.07309940457344055\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00039747546543367207 0.07927210628986359\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009388616308569908 0.03402221202850342\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9997935891151428 0.0036855849903076887\n",
      "relation_encoders.0.layers.1.norm1.bias 0.000767254619859159 0.004046336747705936\n",
      "relation_encoders.0.layers.1.norm2.weight 1.00008225440979 0.004100579768419266\n",
      "relation_encoders.0.layers.1.norm2.bias 0.0002350170980207622 0.004155334550887346\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.8816022677347064e-05 0.08807311207056046\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.899753447622061e-05 0.00300709274597466\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002726572100073099 0.12506155669689178\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.0005524861626327038 0.003980784676969051\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006395828677341342 0.07877437025308609\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005747429560869932 0.07327999174594879\n",
      "relation_encoders.0.layers.2.linear2.weight 6.94664049660787e-06 0.07884187251329422\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008719632402062416 0.03511824086308479\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998694658279419 0.003846491454169154\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00021148761152289808 0.003873983398079872\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9965841770172119 0.003362508723512292\n",
      "relation_encoders.0.layers.2.norm2.bias 3.219943027943373e-05 0.0027154048439115286\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.0002730805135797709 0.08833055198192596\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0004033556906506419 0.0035775352735072374\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004570555465761572 0.12425629794597626\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0005043611163273454 0.00435343012213707\n",
      "relation_encoders.1.layers.0.linear1.weight 4.2057508835569024e-05 0.0791560560464859\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002132444642484188 0.07184731215238571\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00038102659163996577 0.07925831526517868\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002345580607652664 0.03793267905712128\n",
      "relation_encoders.1.layers.0.norm1.weight 1.000154733657837 0.0042243595235049725\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0009364349534735084 0.004615452606230974\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997169971466064 0.004275308921933174\n",
      "relation_encoders.1.layers.0.norm2.bias 7.03211480868049e-05 0.004637043457478285\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00034212516038678586 0.08953574299812317\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.00034539622720330954 0.003893170040100813\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032426072284579277 0.12488511204719543\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0005955775850452483 0.004575842060148716\n",
      "relation_encoders.1.layers.1.linear1.weight -0.001116032712161541 0.0789070650935173\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0023923323024064302 0.07172244787216187\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004220529517624527 0.07875445485115051\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0026594260707497597 0.03769651800394058\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999489188194275 0.004333727993071079\n",
      "relation_encoders.1.layers.1.norm1.bias -0.000339675898430869 0.0039932155050337315\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0003098249435425 0.003990515135228634\n",
      "relation_encoders.1.layers.1.norm2.bias -0.0003521949693094939 0.003911010921001434\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016769015928730369 0.08855466544628143\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 0.00013105382095091045 0.003360792761668563\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002559204585850239 0.12466077506542206\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias -1.4752804418094456e-05 0.003913387190550566\n",
      "relation_encoders.1.layers.2.linear1.weight 0.000999369891360402 0.07953915745019913\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0025870914105325937 0.07237806171178818\n",
      "relation_encoders.1.layers.2.linear2.weight -0.0001194493961520493 0.07953806966543198\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002490295097231865 0.0384196974337101\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003910064697266 0.0040342421270906925\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0003139747423119843 0.004150311462581158\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9973117113113403 0.003642455907538533\n",
      "relation_encoders.1.layers.2.norm2.bias 0.00014398164057638496 0.0024718379136174917\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004988507716916502 0.08858287334442139\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -3.622523945523426e-05 0.003409574506804347\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002951172413304448 0.12573733925819397\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00019546932890079916 0.004697669763118029\n",
      "relation_encoders.2.layers.0.linear1.weight -2.3799322661943734e-05 0.07946794480085373\n",
      "relation_encoders.2.layers.0.linear1.bias -0.001012384076602757 0.06892554461956024\n",
      "relation_encoders.2.layers.0.linear2.weight 0.00015741170500405133 0.07882458716630936\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008585482835769653 0.03413078188896179\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0002338886260986 0.004859983921051025\n",
      "relation_encoders.2.layers.0.norm1.bias -0.00043847732013091445 0.004446470178663731\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000680685043335 0.004165184684097767\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00010987187852151692 0.004644236993044615\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003915116540156305 0.08896130323410034\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00023194988898467273 0.00322299194522202\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010748489294201136 0.12430931627750397\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00039488327456638217 0.0040074484422802925\n",
      "relation_encoders.2.layers.1.linear1.weight 0.0001926197437569499 0.07942184805870056\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0003917540889233351 0.0686987116932869\n",
      "relation_encoders.2.layers.1.linear2.weight -0.0002410565793979913 0.07935656607151031\n",
      "relation_encoders.2.layers.1.linear2.bias 0.00899333506822586 0.033715080469846725\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999499917030334 0.0037302267737686634\n",
      "relation_encoders.2.layers.1.norm1.bias -0.00011066964361816645 0.0040984065271914005\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0001108646392822 0.0044114175252616405\n",
      "relation_encoders.2.layers.1.norm2.bias -0.00024349385057576 0.004360232502222061\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004694079398177564 0.08852279186248779\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.00018606180674396455 0.003286270424723625\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 5.543534643948078e-05 0.12506039440631866\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0006228125421330333 0.004225820302963257\n",
      "relation_encoders.2.layers.2.linear1.weight 9.626004612073302e-05 0.0791047215461731\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0004842296475544572 0.06882040947675705\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00035985332215204835 0.07945343852043152\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009378724731504917 0.033243145793676376\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0003161430358887 0.003985922317951918\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00017638439021538943 0.004560190252959728\n",
      "relation_encoders.2.layers.2.norm2.weight 0.997467577457428 0.00409650569781661\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0005042929551564157 0.0024184000212699175\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 011, BSNE Loss: 0.9980\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.27701568951414574,global_loss: 0.9889050722122192\n",
      "local_loss: 0.19578653543180927,global_loss: 0.5331470966339111\n",
      "local_loss: 0.2872095752340156,global_loss: 0.336047500371933\n",
      "local_loss: 0.39817284059887414,global_loss: 1.1348211765289307\n",
      "local_loss: 0.2953602580305122,global_loss: -0.24337434768676758\n",
      "local_loss: 0.33609386086591825,global_loss: 1.4632000923156738\n",
      "local_loss: 0.4533187054702519,global_loss: 0.8734606504440308\n",
      "local_loss: 0.2717377773091972,global_loss: 1.0834187269210815\n",
      "feature_proj.0.weight 0.0008245622739195824 0.15113763511180878\n",
      "feature_proj.0.bias 0.018881652504205704 0.10875876992940903\n",
      "feature_proj.1.weight 1.0004886388778687 0.004393227398395538\n",
      "feature_proj.1.bias 0.00047624571016058326 0.004340402316302061\n",
      "norm.weight 1.0002837181091309 0.004460623487830162\n",
      "norm.bias 5.177772254683077e-05 0.004535443149507046\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006649968563579023 0.08828289061784744\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -3.4827298804884776e-05 0.003922071773558855\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009391963249072433 0.12495599687099457\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.00038525921991094947 0.004592814017087221\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00012169520050520077 0.07917238771915436\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006222734227776527 0.07289689034223557\n",
      "relation_encoders.0.layers.0.linear2.weight -0.00034009735099971294 0.07879645377397537\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009258884005248547 0.03522871062159538\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999051094055176 0.004742410499602556\n",
      "relation_encoders.0.layers.0.norm1.bias -0.00019272082136012614 0.003965072333812714\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9996592998504639 0.004236006643623114\n",
      "relation_encoders.0.layers.0.norm2.bias 0.0004443521611392498 0.004230317194014788\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015592020645271987 0.0882643535733223\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -8.049511961871758e-05 0.003304527373984456\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.001431531272828579 0.12545476853847504\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.00030470424098894 0.00438958453014493\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007739504799246788 0.07918060570955276\n",
      "relation_encoders.0.layers.1.linear1.bias -0.006101374514400959 0.07310904562473297\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00038828194374218583 0.07927635312080383\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009373377077281475 0.03400582820177078\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9997910261154175 0.00383273558691144\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0007706889882683754 0.004163404926657677\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000829696655273 0.004258608911186457\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00028768955962732434 0.004315490368753672\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.9267404897836968e-05 0.08807805180549622\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.170122600859031e-05 0.0031285639852285385\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002719977404922247 0.12505657970905304\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.0006288581062108278 0.004094947129487991\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006398617988452315 0.07878276705741882\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005732446908950806 0.07328998297452927\n",
      "relation_encoders.0.layers.2.linear2.weight 1.9881990738213062e-05 0.07885075360536575\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008686903864145279 0.035171035677194595\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998620748519897 0.004006283823400736\n",
      "relation_encoders.0.layers.2.norm1.bias 0.00020072795450687408 0.004021303728222847\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9963234066963196 0.0034979921765625477\n",
      "relation_encoders.0.layers.2.norm2.bias 3.4133554436266422e-06 0.0028177639469504356\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.000265628652414307 0.08834394812583923\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0004249083576723933 0.003720983164384961\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004572804318740964 0.12426721304655075\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0005388862336985767 0.004537020344287157\n",
      "relation_encoders.1.layers.0.linear1.weight 4.398028249852359e-05 0.07916555553674698\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002110597211867571 0.07184083759784698\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00038751031388528645 0.07926628738641739\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002370561007410288 0.037940751761198044\n",
      "relation_encoders.1.layers.0.norm1.weight 1.000156283378601 0.004404924344271421\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0009659524075686932 0.004814100917428732\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9997143745422363 0.004446669481694698\n",
      "relation_encoders.1.layers.0.norm2.bias 6.785030564060435e-05 0.004822855815291405\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00034048149245791137 0.08954796940088272\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.00036315940087661147 0.0040542664937675\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.0032436521723866463 0.12488864362239838\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0006396118551492691 0.004749721847474575\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011154102394357324 0.07891463488340378\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0023593460209667683 0.07172828912734985\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004103513201698661 0.078761026263237\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0026817256584763527 0.03763480484485626\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999446868896484 0.00447428785264492\n",
      "relation_encoders.1.layers.1.norm1.bias -0.0003364899312146008 0.004102115519344807\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0002963542938232 0.004103390499949455\n",
      "relation_encoders.1.layers.1.norm2.bias -0.00031190033769235015 0.004032974597066641\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.0001677596301306039 0.08855555951595306\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 0.0001404515642207116 0.00349932792596519\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.0002552065416239202 0.12466559559106827\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias -9.308769949711859e-05 0.004009374417364597\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0009991955012083054 0.07954713702201843\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0025781099684536457 0.0723901242017746\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00011451033060438931 0.079545758664608\n",
      "relation_encoders.1.layers.2.linear2.bias -0.002485032891854644 0.038432490080595016\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003960132598877 0.004159948788583279\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0003337000962346792 0.004323474131524563\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9970671534538269 0.0037770606577396393\n",
      "relation_encoders.1.layers.2.norm2.bias 0.00014707354421261698 0.0025909091345965862\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004992469912394881 0.08859118819236755\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -3.021937118319329e-05 0.003601959440857172\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0029512052424252033 0.12574440240859985\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00018804124556481838 0.004922656342387199\n",
      "relation_encoders.2.layers.0.linear1.weight -2.0984531147405505e-05 0.07947754114866257\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0010420430917292833 0.06892095506191254\n",
      "relation_encoders.2.layers.0.linear2.weight 0.0001597667287569493 0.07883339375257492\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008586840704083443 0.03412378579378128\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0002703666687012 0.005035898182541132\n",
      "relation_encoders.2.layers.0.norm1.bias -0.00046774608199484646 0.0046255444176495075\n",
      "relation_encoders.2.layers.0.norm2.weight 1.000077247619629 0.004267703741788864\n",
      "relation_encoders.2.layers.0.norm2.bias 0.0001161960099125281 0.004773308057337999\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.000391664681956172 0.08896477520465851\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00022235330834519118 0.0033099339343607426\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.00107470469083637 0.12430936098098755\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.0003706474381033331 0.004182079806923866\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019244174472987652 0.07942836731672287\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0003956003347411752 0.06869547069072723\n",
      "relation_encoders.2.layers.1.linear2.weight -0.0002448853920213878 0.07936294376850128\n",
      "relation_encoders.2.layers.1.linear2.bias 0.008986216969788074 0.03372976556420326\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999485015869141 0.003865552134811878\n",
      "relation_encoders.2.layers.1.norm1.bias -8.049886673688889e-05 0.004228807054460049\n",
      "relation_encoders.2.layers.1.norm2.weight 1.000102162361145 0.00462227500975132\n",
      "relation_encoders.2.layers.1.norm2.bias -0.0002346170658711344 0.004545920062810183\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004691704816650599 0.088526651263237\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.0001932396407937631 0.003406353062018752\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 6.095378194004297e-05 0.1250632256269455\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0006788609898649156 0.004372380208224058\n",
      "relation_encoders.2.layers.2.linear1.weight 9.598431643098593e-05 0.07911171764135361\n",
      "relation_encoders.2.layers.2.linear1.bias -0.00046582904178649187 0.06882739067077637\n",
      "relation_encoders.2.layers.2.linear2.weight -0.0003397237742319703 0.07946241647005081\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009423060342669487 0.033310797065496445\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0003130435943604 0.00416047777980566\n",
      "relation_encoders.2.layers.2.norm1.bias -0.00016125122783705592 0.004796587396413088\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9972108602523804 0.0042389933951199055\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0005509418551810086 0.0025346700567752123\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 012, BSNE Loss: 1.0855\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.4527913193736127,global_loss: 0.4320122003555298\n",
      "local_loss: 0.17159865246013084,global_loss: 0.7015641927719116\n",
      "local_loss: 0.23638812223818936,global_loss: 1.204330325126648\n",
      "local_loss: 0.30858194865649563,global_loss: 0.4049128592014313\n",
      "local_loss: 0.2842371431371817,global_loss: 0.653351366519928\n",
      "local_loss: 0.26338306542705475,global_loss: 1.0054980516433716\n",
      "local_loss: 0.3733947589851772,global_loss: 0.028093742206692696\n",
      "local_loss: 0.12501077436907454,global_loss: 1.1796491146087646\n",
      "feature_proj.0.weight 0.0007618343806825578 0.15116167068481445\n",
      "feature_proj.0.bias 0.018810998648405075 0.10882958769798279\n",
      "feature_proj.1.weight 1.0005062818527222 0.004552735481411219\n",
      "feature_proj.1.bias 0.0005073860520496964 0.00450411019846797\n",
      "norm.weight 1.0003145933151245 0.004604995716363192\n",
      "norm.bias 5.263001366984099e-05 0.004704958293586969\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0006624071393162012 0.08829337358474731\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -4.053937300341204e-05 0.004110842011868954\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0009404795709997416 0.12495820969343185\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias 0.0003905555058736354 0.004807972349226475\n",
      "relation_encoders.0.layers.0.linear1.weight -0.00012021028669551015 0.07918012142181396\n",
      "relation_encoders.0.layers.0.linear1.bias -0.006240599788725376 0.07291620969772339\n",
      "relation_encoders.0.layers.0.linear2.weight -0.0003330763429403305 0.07880230247974396\n",
      "relation_encoders.0.layers.0.linear2.bias -0.009252524003386497 0.03524501994252205\n",
      "relation_encoders.0.layers.0.norm1.weight 0.9999116659164429 0.004955513868480921\n",
      "relation_encoders.0.layers.0.norm1.bias -0.0002223800984211266 0.004153416957706213\n",
      "relation_encoders.0.layers.0.norm2.weight 0.9996547698974609 0.004435065668076277\n",
      "relation_encoders.0.layers.0.norm2.bias 0.0004979771911166608 0.004396216012537479\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight -0.00015585408254992217 0.08827643096446991\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias -0.00010327198833692819 0.00344749353826046\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight -0.0014321287162601948 0.12546616792678833\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -0.000324229768011719 0.00453319912776351\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0007742285961285233 0.07918727397918701\n",
      "relation_encoders.0.layers.1.linear1.bias -0.006132151000201702 0.07311824709177017\n",
      "relation_encoders.0.layers.1.linear2.weight -0.00038580631371587515 0.07928124070167542\n",
      "relation_encoders.0.layers.1.linear2.bias -0.009375084191560745 0.03398868814110756\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9997838139533997 0.003964219242334366\n",
      "relation_encoders.0.layers.1.norm1.bias 0.0007489394047297537 0.004281470086425543\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000839233398438 0.004379096906632185\n",
      "relation_encoders.0.layers.1.norm2.bias 0.00032196627580560744 0.004446304868906736\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -2.9385933885350823e-05 0.08808527886867523\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias 8.15483508631587e-05 0.003251660382375121\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight 0.002717399736866355 0.12505358457565308\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -0.0006873425445519388 0.004224634729325771\n",
      "relation_encoders.0.layers.2.linear1.weight -0.0006401654100045562 0.07879094779491425\n",
      "relation_encoders.0.layers.2.linear1.bias -0.005725438706576824 0.07329456508159637\n",
      "relation_encoders.0.layers.2.linear2.weight 2.9012779123149812e-05 0.07885976880788803\n",
      "relation_encoders.0.layers.2.linear2.bias -0.008664430119097233 0.0352243036031723\n",
      "relation_encoders.0.layers.2.norm1.weight 0.9998495578765869 0.004156390205025673\n",
      "relation_encoders.0.layers.2.norm1.bias 0.0001995677303057164 0.004175364039838314\n",
      "relation_encoders.0.layers.2.norm2.weight 0.996053159236908 0.0036405061837285757\n",
      "relation_encoders.0.layers.2.norm2.bias -2.0913430489599705e-05 0.0029037329368293285\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight 0.00025817047571763396 0.08836007863283157\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 0.0004528749268501997 0.0038726746570318937\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0004578511288855225 0.12427671998739243\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias 0.0005659673479385674 0.0047043017111718655\n",
      "relation_encoders.1.layers.0.linear1.weight 4.634741344489157e-05 0.07917413860559464\n",
      "relation_encoders.1.layers.0.linear1.bias 0.002081965794786811 0.07183820754289627\n",
      "relation_encoders.1.layers.0.linear2.weight -0.0003904032055288553 0.07927311956882477\n",
      "relation_encoders.1.layers.0.linear2.bias -0.002388653112575412 0.037963785231113434\n",
      "relation_encoders.1.layers.0.norm1.weight 1.000151515007019 0.0045684874057769775\n",
      "relation_encoders.1.layers.0.norm1.bias -0.0009834467200562358 0.004979414399713278\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9996980428695679 0.0046179709024727345\n",
      "relation_encoders.1.layers.0.norm2.bias 6.477346323663369e-05 0.004969222936779261\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.00033931725192815065 0.08955688774585724\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias -0.000385898572858423 0.004199324641376734\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight 0.003245103405788541 0.12488943338394165\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias -0.0006752818007953465 0.004910914693027735\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0011148055782541633 0.07892116159200668\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0023299132008105516 0.07173740863800049\n",
      "relation_encoders.1.layers.1.linear2.weight 0.0004043073859065771 0.07876618206501007\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0026922360993921757 0.03757631406188011\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999313950538635 0.004623572342097759\n",
      "relation_encoders.1.layers.1.norm1.bias -0.0003212807350791991 0.004229939077049494\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0002809762954712 0.004213021602481604\n",
      "relation_encoders.1.layers.1.norm2.bias -0.0002586802002042532 0.004174560308456421\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight 0.00016805692575871944 0.0885578840970993\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 0.00015100777090992779 0.003659202018752694\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight -0.00025297299725934863 0.12467323988676071\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias -0.00016030026017688215 0.004125801846385002\n",
      "relation_encoders.1.layers.2.linear1.weight 0.0009992123814299703 0.07955430448055267\n",
      "relation_encoders.1.layers.2.linear1.bias 0.002557572443038225 0.07240383327007294\n",
      "relation_encoders.1.layers.2.linear2.weight -0.00011135489330627024 0.07955227047204971\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0024794330820441246 0.038455430418252945\n",
      "relation_encoders.1.layers.2.norm1.weight 1.0003985166549683 0.004279788583517075\n",
      "relation_encoders.1.layers.2.norm1.bias -0.0003462567983660847 0.004493922460824251\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9968260526657104 0.003894966561347246\n",
      "relation_encoders.1.layers.2.norm2.bias 0.0001541435340186581 0.0027145766653120518\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.0004975094925612211 0.08859860897064209\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias -1.639083529880736e-05 0.003773176111280918\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002950043883174658 0.12574908137321472\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias 0.00019058803445659578 0.005136973224580288\n",
      "relation_encoders.2.layers.0.linear1.weight -1.8627120880410075e-05 0.0794878900051117\n",
      "relation_encoders.2.layers.0.linear1.bias -0.001063875388354063 0.06891219317913055\n",
      "relation_encoders.2.layers.0.linear2.weight 0.0001614894426893443 0.07884234935045242\n",
      "relation_encoders.2.layers.0.linear2.bias 0.008585380390286446 0.034104764461517334\n",
      "relation_encoders.2.layers.0.norm1.weight 1.000291109085083 0.005240302532911301\n",
      "relation_encoders.2.layers.0.norm1.bias -0.0005042814882472157 0.004798804875463247\n",
      "relation_encoders.2.layers.0.norm2.weight 1.0000783205032349 0.004371806047856808\n",
      "relation_encoders.2.layers.0.norm2.bias 0.00011485556024126709 0.00489406194537878\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0003914278349839151 0.08897103369235992\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 0.00021560024470090866 0.00339927664026618\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0010738379787653685 0.12431444227695465\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -0.00036214926512911916 0.004346858244389296\n",
      "relation_encoders.2.layers.1.linear1.weight 0.00019233563216403127 0.07943428307771683\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0004022313514724374 0.06869251281023026\n",
      "relation_encoders.2.layers.1.linear2.weight -0.00024245778331533074 0.07936935871839523\n",
      "relation_encoders.2.layers.1.linear2.bias 0.008997870609164238 0.03370972350239754\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999430179595947 0.003984071314334869\n",
      "relation_encoders.2.layers.1.norm1.bias -3.504473716020584e-05 0.004339376464486122\n",
      "relation_encoders.2.layers.1.norm2.weight 1.0000941753387451 0.004795050714164972\n",
      "relation_encoders.2.layers.1.norm2.bias -0.000249616801738739 0.00471300445497036\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0004690909991040826 0.08853193372488022\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 0.000207858785870485 0.003532466944307089\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight 6.757635856047273e-05 0.12506702542304993\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 0.0007376977591775358 0.00452463049441576\n",
      "relation_encoders.2.layers.2.linear1.weight 9.582273196429014e-05 0.07911857217550278\n",
      "relation_encoders.2.layers.2.linear1.bias -0.00044502445962280035 0.06881853938102722\n",
      "relation_encoders.2.layers.2.linear2.weight -0.00032263767207041383 0.07947050034999847\n",
      "relation_encoders.2.layers.2.linear2.bias 0.009462849237024784 0.033383265137672424\n",
      "relation_encoders.2.layers.2.norm1.weight 1.0003211498260498 0.00430716946721077\n",
      "relation_encoders.2.layers.2.norm1.bias -0.0001605882862349972 0.0050287810154259205\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9969645142555237 0.004381326958537102\n",
      "relation_encoders.2.layers.2.norm2.bias 0.0005881275283172727 0.0026351711712777615\n",
      "classifier.0.weight -6.929288701940095e-06 0.05330759659409523\n",
      "classifier.0.bias -0.0012545359786599874 0.04204427823424339\n",
      "classifier.3.weight -0.0001551336608827114 0.06239447370171547\n",
      "classifier.3.bias -2.175290137529373e-05 0.02749512903392315\n",
      "Pretrain Epoch: 013, BSNE Loss: 0.9781\n"
     ]
    }
   ],
   "source": [
     "# Run the full end-to-end test (new version)\n",
     "\n",
     "# Hyperparameter / experiment settings\n",
     "args = {\n",
     "    \"dataset\": \"amazon\",\n",
     "#     \"dataset\": \"yelp\",\n",
     "    \"batch_size\": 8,\n",
     "    \"sample_size\": 50,\n",
     "    \"weight_decay\": 0.00005,\n",
     "    \"emb_size\": 32,\n",
     "    \"pretrain_epochs\": 200,\n",
     "    \"pretrain_lr\": 0.001, # alternative value: 0.0001\n",
     "    \"finetune_lr\": 0.0005,\n",
     "    \"num_epochs\": 200,\n",
     "    \"pretrain_patience\": 20,\n",
     "    \"patience\": 30,\n",
     "    \"tsne_weight\": 0.3,\n",
     "    \"weight\": 0.6,\n",
     "    \"layers\": 7,\n",
     "    \"test_size\": 0.6,\n",
     "    \"val_size\": 0.5,\n",
     "    \"layers_tree\": 7,\n",
     "    \"seed\": 76,\n",
     "    \"num_heads\": 2,\n",
     "    \"drop_rate\": 0.5\n",
     "}\n",
     "\n",
     "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
     "print(device)\n",
     "\n",
     "# TensorBoard writer, tagged with the dataset name and launch timestamp\n",
     "timestamp = time.strftime(\"%Y%m%d-%H%M%S\")\n",
     "writer = SummaryWriter(f'runs/{args[\"dataset\"]}_{timestamp}')\n",
     "\n",
     "print('loading data...')\n",
     "prefix = \"../../data/\"\n",
     "\n",
     "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
     "\n",
     "# Seed numpy and random for reproducible sampling\n",
     "np.random.seed(args['seed'])\n",
     "rd.seed(args['seed'])\n",
     "\n",
     "# Stratified train/val/test split.\n",
     "# NOTE(review): the splits use a hard-coded random_state=2, independent of args['seed'] — confirm this is intentional\n",
     "if args['dataset'] == 'yelp':\n",
     "    index = list(range(len(labels)))\n",
     "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
     "                                                                    test_size=args['test_size'], random_state=2,\n",
     "                                                                    shuffle=True)\n",
     "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
     "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
     "                                                          random_state=2, shuffle=True)\n",
     "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
     "elif args['dataset'] == 'amazon':\n",
     "    # NOTE(review): nodes 0-3304 are excluded from the split — presumably unlabeled; confirm against the dataset\n",
     "    index = list(range(3305, len(labels)))\n",
     "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
     "                                                                    stratify=labels[3305:],\n",
     "                                                                    test_size=args['test_size'],\n",
     "                                                                    random_state=2, shuffle=True)\n",
     "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
     "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
     "                                                          random_state=2, shuffle=True)\n",
     "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
     "# NOTE(review): dist_path is only defined for 'yelp'/'amazon' — any other dataset value makes the open() below raise NameError\n",
     "\n",
     "# Load precomputed shortest-path distance matrix.\n",
     "# Caution: pickle.load executes arbitrary code — only use files from a trusted source.\n",
     "with open(dist_path, 'rb') as f:\n",
     "    dist_data = pickle.load(f)\n",
     "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
     "\n",
     "# Prepare node feature matrix\n",
     "feat_data = torch.tensor(feat_data).float()\n",
     "# Min-max normalize features (per column, to [0, 1])\n",
     "scaler = MinMaxScaler()\n",
     "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
     "\n",
     "# Initialize the BSNE transformer model (one relation encoder per edge-index set,\n",
     "# consistent with the relation_encoders.0/1/2 parameters in the logged output above)\n",
     "bsne_model = BSNE_Transformer(\n",
     "    in_feat=feat_data.shape[1],\n",
     "    out_feat=2,\n",
     "    relation_nums=len(edge_indexs),\n",
     "    d_model=64,\n",
     "    nhead=args['num_heads'],\n",
     "    num_layers=3,\n",
     "    dim_feedforward=256,\n",
     "    drop_rate=args['drop_rate']\n",
     ").to(device)\n",
    "\n",
    "# 将边索引转移到设备\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "all_local_losses = []      # 每个epoch的batch本地损失列表\n",
    "all_global_losses = []     # 每个epoch的batch全局损失列表\n",
    "all_bsne_losses = []       # 每个epoch的总BSNE损失\n",
    "epoch_avg_local = []       # 每个epoch的平均本地损失\n",
    "epoch_avg_global = []      # 每个epoch的平均全局损失\n",
    "    \n",
    "    \n",
    "print(\"\\n=== Starting Pretraining ===\")\n",
    "\n",
    "bsne_model.classifier.requires_grad_(False)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    filter(lambda p: p.requires_grad, bsne_model.parameters()),\n",
    "    lr=args['pretrain_lr'],\n",
    "    weight_decay=5e-5\n",
    ")\n",
    "pretrain_best_loss = float('inf')\n",
    "pretrain_no_improve = 0\n",
    "pretrain_early_stop = False\n",
    "\n",
    "# Pretraining loop: per epoch, sample a batch of center nodes, build a\n",
    "# distance-biased subgraph (Bp) and a uniformly sampled subgraph (Bu) around\n",
    "# each center, and optimise a local KL-style loss plus a global ratio loss.\n",
    "for epoch in range(args['pretrain_epochs']):\n",
    "    # (Pretraining early stopping is currently disabled.)\n",
    "\n",
    "    bsne_model.train()\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    epoch_local_losses = []\n",
    "    epoch_global_losses = []\n",
    "\n",
    "    # Sample a batch of distinct center nodes.\n",
    "    batch_centers = np.random.choice(range(feat_data.shape[0]), size=args['batch_size'], replace=False)\n",
    "\n",
    "    sample_size = args['sample_size']\n",
    "    temperature = 100  # (unused by the current loss formulation; kept for parity)\n",
    "    eps = 1e-10\n",
    "\n",
    "    total_loss = 0.0\n",
    "    batch_size = len(batch_centers)\n",
    "\n",
    "    # Build the Bp subgraph for each center node: neighbours sampled with\n",
    "    # probability proportional to 10**(-shortest_path_distance).\n",
    "    bp_subgraphs = []\n",
    "    for xi in batch_centers:\n",
    "        dist_row = dist_matrix[xi].cpu().numpy()\n",
    "        probs = np.power(10.0, -dist_row)\n",
    "        probs[xi] = 0  # exclude the center itself\n",
    "        probs /= probs.sum()\n",
    "\n",
    "        # NOTE(review): np.random.choice with replace=False fails when fewer\n",
    "        # than actual_sample_size entries have non-zero probability — confirm\n",
    "        # the distance matrix guarantees enough reachable nodes.\n",
    "        available_nodes = len(dist_row) - 1\n",
    "        actual_sample_size = min(sample_size, available_nodes)\n",
    "\n",
    "        if actual_sample_size > 0:\n",
    "            neighbors = np.random.choice(len(dist_row), size=actual_sample_size, p=probs, replace=False)\n",
    "            subgraph_nodes = [xi] + neighbors.tolist()\n",
    "        else:\n",
    "            subgraph_nodes = [xi]\n",
    "\n",
    "        bp_subgraphs.append(subgraph_nodes)\n",
    "\n",
    "    # Build the Bu subgraph for each center node (uniform sampling).\n",
    "    bu_subgraphs = []\n",
    "    for xi in batch_centers:\n",
    "        dist_row = dist_matrix[xi].cpu().numpy()\n",
    "\n",
    "        available_nodes = len(dist_row) - 1\n",
    "        actual_sample_size = min(sample_size, available_nodes)\n",
    "\n",
    "        if actual_sample_size > 0:\n",
    "            neighbors = np.random.choice(len(dist_row), size=actual_sample_size, replace=False)\n",
    "            subgraph_nodes = [xi] + neighbors.tolist()\n",
    "        else:\n",
    "            subgraph_nodes = [xi]\n",
    "\n",
    "        bu_subgraphs.append(subgraph_nodes)\n",
    "\n",
    "    # Encode every node of every Bp subgraph through the model.\n",
    "    bp_features_batch = []\n",
    "    for bp_nodes in bp_subgraphs:\n",
    "        bp_node_features = []\n",
    "        for node_idx in bp_nodes:\n",
    "            # One-hop neighbourhood subgraph of the node.\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "            bp_node_features.append(node_feature.squeeze(0))\n",
    "        bp_features_batch.append(torch.stack(bp_node_features))  # [sample_size+1, d_model]\n",
    "\n",
    "    # Encode every node of every Bu subgraph through the model.\n",
    "    bu_features_batch = []\n",
    "    for bu_nodes in bu_subgraphs:\n",
    "        bu_node_features = []\n",
    "        for node_idx in bu_nodes:\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "            bu_node_features.append(node_feature.squeeze(0))\n",
    "        bu_features_batch.append(torch.stack(bu_node_features))  # [sample_size+1, d_model]\n",
    "\n",
    "    # Compute the local + global loss for each (Bp, Bu) subgraph pair.\n",
    "    for i in range(batch_size):\n",
    "        center_idx = batch_centers[i]\n",
    "        center_feature = bp_features_batch[i][0]  # the center node is stored first\n",
    "\n",
    "        # Bp members other than the center node.\n",
    "        other_bp_indices = bp_subgraphs[i][1:]\n",
    "        other_bp_features = bp_features_batch[i][1:]\n",
    "\n",
    "        with torch.no_grad():\n",
    "            # P vector: target distribution derived from graph (path) distances.\n",
    "            orig_dists_bp = dist_matrix[center_idx][other_bp_indices].cpu().numpy()\n",
    "            P = np.exp(-orig_dists_bp)\n",
    "            P_sum = P.sum()\n",
    "            P = P / (P_sum + eps)\n",
    "            P = torch.tensor(P, device=device)\n",
    "\n",
    "        # Q vector: model distribution derived from embedding distances.\n",
    "        # (fixed) a previous feat_dists_bp.requires_grad_(True) call was removed:\n",
    "        # the tensor is a non-leaf that already requires grad through the model.\n",
    "        feat_dists_bp = torch.cdist(center_feature.unsqueeze(0), other_bp_features).squeeze(0)\n",
    "        Q = torch.softmax(-feat_dists_bp, dim=0)\n",
    "\n",
    "        # Local loss: mean of log((P/Q)^2).\n",
    "        log_ratio = torch.log(((P + eps)/(Q + eps))**2)\n",
    "        loss_local = log_ratio.mean()\n",
    "\n",
    "        # Unnormalised global probabilities from graph distances over all nodes.\n",
    "        all_dists = dist_matrix[center_idx].cpu().numpy()\n",
    "        unnorm_probs_global = np.exp(-all_dists)\n",
    "        unnorm_probs_global[center_idx] = 0  # exclude the center itself\n",
    "\n",
    "        # Global normalisation constant.\n",
    "        Z_global = unnorm_probs_global.sum() + eps\n",
    "\n",
    "        # Probability mass of the Bp nodes under the global distribution.\n",
    "        bp_global_prob_sum = unnorm_probs_global[other_bp_indices].sum() / Z_global\n",
    "\n",
    "        # k_Bp as defined by the formula in the accompanying method description.\n",
    "        N = dist_matrix.shape[0]\n",
    "        k_Bp = bp_global_prob_sum * (N / len(other_bp_indices))\n",
    "\n",
    "        bu_features = bu_features_batch[i][1:]  # exclude the center node\n",
    "        feat_dists_bu = torch.cdist(center_feature.unsqueeze(0), bu_features).squeeze(0)\n",
    "\n",
    "        sum_e_bu = torch.exp(-feat_dists_bu).sum()\n",
    "        sum_e_bp = torch.exp(-feat_dists_bp).sum()\n",
    "        global_ratio = k_Bp * (sum_e_bu / (sum_e_bp + eps))\n",
    "        loss_global = torch.log(global_ratio.clamp(min=eps, max=1e10)**2)\n",
    "\n",
    "        # Accumulate the combined loss.\n",
    "        total_loss += loss_local + loss_global\n",
    "\n",
    "        epoch_local_losses.append(loss_local.item())\n",
    "        epoch_global_losses.append(loss_global.item())\n",
    "\n",
    "        print(f\"local_loss: {loss_local.item()},global_loss: {loss_global.item()}\")\n",
    "\n",
    "    # (fixed) bsne_loss was previously computed twice; compute it once.\n",
    "    bsne_loss = total_loss / batch_size\n",
    "\n",
    "    all_local_losses.append(epoch_local_losses)\n",
    "    all_global_losses.append(epoch_global_losses)\n",
    "\n",
    "    avg_local = np.mean(epoch_local_losses)\n",
    "    avg_global = np.mean(epoch_global_losses)\n",
    "    epoch_avg_local.append(avg_local)\n",
    "    epoch_avg_global.append(avg_global)\n",
    "\n",
    "    bsne_loss_item = bsne_loss.item()\n",
    "    all_bsne_losses.append(bsne_loss_item)\n",
    "\n",
    "    bsne_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    # Debug output: parameter statistics after each update.\n",
    "    for name, param in bsne_model.named_parameters():\n",
    "        print(name, param.data.mean().item(), param.data.std().item())\n",
    "\n",
    "    writer.add_scalar('Pretrain/TSNE_Loss', bsne_loss.item(), epoch)\n",
    "    print(f'Pretrain Epoch: {epoch:03d}, BSNE Loss: {bsne_loss.item():.4f}')\n",
    "\n",
    "\n",
    "# Save the detailed local / global loss figures.\n",
    "plot_losses(all_local_losses, 'Local Loss During Pretraining', 'Loss', 'local_loss.png')\n",
    "plot_losses(all_global_losses, 'Global Loss During Pretraining', 'Loss', 'global_loss.png')\n",
    "\n",
    "# Plot the total BSNE loss over pretraining epochs\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss.png')\n",
    "plt.close()\n",
    "\n",
    "# Compare the average local vs global losses per epoch\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(epoch_avg_local, 'b-', linewidth=2, label='Local Loss (Avg)')\n",
    "plt.plot(epoch_avg_global, 'r-', linewidth=2, label='Global Loss (Avg)')\n",
    "plt.title('Local vs Global Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "plt.savefig('local_vs_global.png')\n",
    "plt.close()\n",
    "\n",
    "print(\"\\nLoss plots saved to local_loss.png, global_loss.png, bsne_loss.png, local_vs_global.png\")\n",
    "\n",
    "print(\"\\n=== Starting Fine-tuning ===\")\n",
    "# Unfreeze the classifier head for supervised fine-tuning.\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=5e-5\n",
    ")\n",
    "# NOTE(review): the 'verbose' argument of ReduceLROnPlateau is deprecated in\n",
    "# torch >= 2.2 — confirm against the installed torch version.\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "# Fine-tuning loop: supervised classification on sampled center nodes with\n",
    "# periodic validation, LR scheduling, and early stopping on validation AUC.\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "    total_loss = 0.0\n",
    "\n",
    "    # Sample center nodes for this batch\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = []\n",
    "\n",
    "    for xi in batch_centers:\n",
    "        subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "        #         print(len(subgraph['features']))\n",
    "        subgraph_data.append(subgraph)\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # Extract the labels of the center nodes\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "\n",
    "    # Classification loss. NOTE(review): F.nll_loss expects log-probabilities —\n",
    "    # presumably the model's classifier ends with log_softmax; confirm.\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    total_loss += cls_loss.item()\n",
    "\n",
    "    # NOTE(review): cls_loss is already averaged over the batch; dividing by\n",
    "    # batch_size again rescales the logged value — confirm intended.\n",
    "    avg_loss = total_loss / args['batch_size']\n",
    "    writer.add_scalar('FineTune/Train_Loss', avg_loss, epoch)\n",
    "\n",
    "    # Validate every 5 epochs.\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        writer.add_scalar('Validation/AUC', val_auc, epoch)\n",
    "        writer.add_scalar('Validation/F1', val_f1, epoch)\n",
    "        writer.add_scalar('Validation/GMean', val_g_mean, epoch)\n",
    "\n",
    "        print(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        # Track the best validation AUC and snapshot the model weights.\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            print(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# Load the best checkpoint found during validation.\n",
    "# NOTE(review): best_model_state stays None if validation AUC never exceeds\n",
    "# 0.0 — load_state_dict would then fail; confirm this cannot happen.\n",
    "bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# Final evaluation on the held-out test split\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "print(f'\\n=== Final Test Results ===')\n",
    "print(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n",
    "writer.close()\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "b2425ecd",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[1.107200575515279,\n",
       " 1.061684052171675,\n",
       " 1.1939222378576388,\n",
       " 0.9272324137584343,\n",
       " 0.991151823999164,\n",
       " 0.9214618036826305,\n",
       " 0.9657002502952303,\n",
       " 1.2310986091264289,\n",
       " 1.1565664696735918,\n",
       " 0.974084948155445,\n",
       " 0.9471994480099641,\n",
       " 0.9979934395991935,\n",
       " 1.0855401512744671,\n",
       " 0.9780997046351494,\n",
       " 0.9541658706086664,\n",
       " 1.035424557346193,\n",
       " 0.9734147768536383,\n",
       " 1.039191224022804,\n",
       " 0.737816652170451,\n",
       " 1.2446047879151156,\n",
       " 1.0287895924087023,\n",
       " 0.8659443910284015,\n",
       " 1.009443279384725,\n",
       " 1.114789137073348,\n",
       " 0.7646885973115873,\n",
       " 0.9345326595268256,\n",
       " 0.9949457430082145,\n",
       " 0.9365827034306424,\n",
       " 1.0206016789781178,\n",
       " 1.023474277019406,\n",
       " 1.086509359770415,\n",
       " 0.9269521661328066,\n",
       " 0.6431055814125219,\n",
       " 1.128927328120626,\n",
       " 0.9031508466609557,\n",
       " 1.2612834583808885,\n",
       " 1.0648247609062917,\n",
       " 1.0563186111910594,\n",
       " 0.8997309025497133]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Inspect the recorded per-epoch pretraining losses\n",
    "all_bsne_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "b3cc19cb",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-plot the total BSNE pretraining loss to a temporary file\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss_tmp.png')\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "22700832",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "234ae883",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "8a4fc62b",
   "metadata": {},
   "outputs": [],
   "source": [
    "def plot_losses(losses, title, ylabel, filename):\n",
    "    \"\"\"Plot per-batch losses, per-epoch means, and a moving average.\n",
    "\n",
    "    Args:\n",
    "        losses: one list of per-batch loss values per epoch.\n",
    "        title: figure title.\n",
    "        ylabel: y-axis label.\n",
    "        filename: path the figure is saved to; the figure is closed afterwards.\n",
    "    \"\"\"\n",
    "    plt.figure(figsize=(12, 6))\n",
    "\n",
    "    # Individual batch losses as light scatter points, one column per epoch.\n",
    "    for epoch, batch_losses in enumerate(losses):\n",
    "        plt.scatter([epoch] * len(batch_losses), batch_losses,\n",
    "                   color='lightblue', alpha=0.4, s=10)\n",
    "\n",
    "    # Per-epoch mean loss as a solid line.\n",
    "    avg_losses = [np.mean(batch_losses) for batch_losses in losses]\n",
    "    plt.plot(avg_losses, 'b-', linewidth=2, label='Epoch Average')\n",
    "\n",
    "    # Moving average over the last `window` epochs.\n",
    "    # (fixed) the slice previously covered window+1 points, off by one\n",
    "    # relative to the advertised window size.\n",
    "    window = 5\n",
    "    moving_avg = [np.mean(avg_losses[max(0, i - window + 1):i + 1])\n",
    "                 for i in range(len(avg_losses))]\n",
    "    plt.plot(moving_avg, 'r--', linewidth=2, label=f'{window}-Epoch Moving Avg')\n",
    "\n",
    "    plt.title(title)\n",
    "    plt.xlabel('Epoch')\n",
    "    plt.ylabel(ylabel)\n",
    "    plt.legend()\n",
    "    plt.grid(True)\n",
    "    plt.savefig(filename)\n",
    "    plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "0e1139ba",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Regenerate the local / global pretraining loss figures\n",
    "plot_losses(all_local_losses, 'Local Loss During Pretraining', 'Loss', 'local_loss.png')\n",
    "plot_losses(all_global_losses, 'Global Loss During Pretraining', 'Loss', 'global_loss.png')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "c155328f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-plot the total BSNE pretraining loss to a second file\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss_2.png')\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7654321",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reset the loss-tracking containers before a fresh pretraining run\n",
    "all_local_losses = []      # per-epoch lists of per-sample local losses\n",
    "all_global_losses = []     # per-epoch lists of per-sample global losses\n",
    "all_bsne_losses = []       # total BSNE loss per epoch\n",
    "epoch_avg_local = []       # mean local loss per epoch\n",
    "epoch_avg_global = []      # mean global loss per epoch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "5afcf296",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.318894984125466,\n",
       "  0.09415730603833102,\n",
       "  0.4918617138241354,\n",
       "  0.2804513951145457,\n",
       "  0.08868618221165203,\n",
       "  0.9113867303179941,\n",
       "  0.24432146188164153,\n",
       "  0.2832494291757588],\n",
       " [0.37913901752180135,\n",
       "  0.5261684953480049,\n",
       "  0.8797139611549896,\n",
       "  0.30346543597584313,\n",
       "  0.3910120217812854,\n",
       "  0.5231309853624211,\n",
       "  0.5742858183210693,\n",
       "  0.28359737880479446],\n",
       " [0.32511359473131535,\n",
       "  0.22773384540768454,\n",
       "  0.352377698433856,\n",
       "  0.2170029930196404,\n",
       "  0.47281822665986706,\n",
       "  0.25676246028102445,\n",
       "  0.24751037847255486,\n",
       "  0.6245152100982301],\n",
       " [0.13166377686109862,\n",
       "  0.2766720371896985,\n",
       "  -0.05386004340781954,\n",
       "  0.5630936942985744,\n",
       "  0.4069364070745354,\n",
       "  0.3336152985319311,\n",
       "  0.1509819002466012,\n",
       "  0.26778985928636195],\n",
       " [0.4320987011089848,\n",
       "  0.15748576785963417,\n",
       "  0.3938704457971992,\n",
       "  0.24329365893880858,\n",
       "  0.23496912298866343,\n",
       "  -0.020173186591273897,\n",
       "  0.6918022185714879,\n",
       "  0.5838637726539995],\n",
       " [0.2593510643541698,\n",
       "  0.3669624056925919,\n",
       "  0.03888873600589818,\n",
       "  0.24089571916975538,\n",
       "  0.6972934867716148,\n",
       "  0.3269972298155711,\n",
       "  0.5946743777984239,\n",
       "  0.5232795732741286],\n",
       " [0.1467046411642395,\n",
       "  0.22397348907595038,\n",
       "  0.28870576828323574,\n",
       "  0.30975774446951665,\n",
       "  0.16097410405358778,\n",
       "  0.8461254863788272,\n",
       "  0.39115101130579133,\n",
       "  0.22022833204805356],\n",
       " [0.06693640408653133,\n",
       "  0.2643270853910218,\n",
       "  0.1605261054550336,\n",
       "  0.0629868407761283,\n",
       "  0.2535947936202589,\n",
       "  0.5939216447911237,\n",
       "  0.6772086679337699,\n",
       "  0.20070990511280828],\n",
       " [0.6633988792885007,\n",
       "  0.3645825382268772,\n",
       "  0.05823089125971013,\n",
       "  0.42573410151521324,\n",
       "  0.2390175513702671,\n",
       "  0.30132875577799584,\n",
       "  0.3835194964048758,\n",
       "  0.47681752922613],\n",
       " [0.4742982456712902,\n",
       "  0.4273709014978655,\n",
       "  0.7024527683471224,\n",
       "  0.5083224712581764,\n",
       "  0.3682281964467648,\n",
       "  0.2817965637222384,\n",
       "  0.3678045546241805,\n",
       "  0.17847447497973362],\n",
       " [0.26356792983684185,\n",
       "  0.25545447000204574,\n",
       "  0.38065219308485504,\n",
       "  0.4528441415476885,\n",
       "  0.583594099829324,\n",
       "  0.28443280297897305,\n",
       "  0.20629477314671424,\n",
       "  0.5158605938807076],\n",
       " [0.16696360529455234,\n",
       "  0.5106032492345459,\n",
       "  0.24767361696812512,\n",
       "  0.13051752279890386,\n",
       "  0.41795937516106385,\n",
       "  0.536415427920909,\n",
       "  0.3622461422754052,\n",
       "  0.5312381046010971],\n",
       " [0.6949588763645287,\n",
       "  0.1745667011103254,\n",
       "  0.42925886261698437,\n",
       "  0.11566038249736335,\n",
       "  0.2117306245971566,\n",
       "  0.48052635774776004,\n",
       "  0.9355341751248436,\n",
       "  0.42923221841103854],\n",
       " [0.11294069464527608,\n",
       "  0.4043864213562358,\n",
       "  0.21874089884443723,\n",
       "  0.024918610695095336,\n",
       "  0.3580495066575715,\n",
       "  0.33753295529810146,\n",
       "  0.17218391269768885,\n",
       "  0.6082914769629864],\n",
       " [0.35498501726528114,\n",
       "  0.12461027693787646,\n",
       "  0.20745521511466558,\n",
       "  0.6832812409295458,\n",
       "  0.4303356874321688,\n",
       "  0.3413453245395873,\n",
       "  0.30241846838729014,\n",
       "  0.7340548799055118],\n",
       " [0.44069219680815314,\n",
       "  0.5371376261775923,\n",
       "  0.40640829187375704,\n",
       "  0.39187290647573625,\n",
       "  0.3680229644618194,\n",
       "  0.1301404048382503,\n",
       "  0.17705344423008196,\n",
       "  0.24562445105091282],\n",
       " [0.3810916201836514,\n",
       "  0.16804416961523796,\n",
       "  0.486430629825294,\n",
       "  0.41615946658710845,\n",
       "  0.4468758618189642,\n",
       "  0.21825295260886401,\n",
       "  0.558576574309811,\n",
       "  0.5901761904518812],\n",
       " [0.31526337409992017,\n",
       "  0.4322203722111835,\n",
       "  0.2352440941437529,\n",
       "  0.32407133600301086,\n",
       "  0.6336833487653791,\n",
       "  0.4961970952659696,\n",
       "  0.4037380678327054,\n",
       "  0.13999177860654283],\n",
       " [0.27901952682742576,\n",
       "  0.6335967298273617,\n",
       "  0.18941899745131907,\n",
       "  0.3206294063350459,\n",
       "  0.13778533747878102,\n",
       "  0.5121428267182229,\n",
       "  0.567644932305279,\n",
       "  0.4974743744479536],\n",
       " [0.4574340667802953,\n",
       "  0.00307976686404029,\n",
       "  0.7195067353703197,\n",
       "  0.5148966409610694,\n",
       "  0.11375562596630329,\n",
       "  0.22445224014780327,\n",
       "  0.5389877512970459,\n",
       "  0.09023836527967422],\n",
       " [0.6727040264960646,\n",
       "  0.12119446121420228,\n",
       "  0.6594561893010701,\n",
       "  0.5583675780917104,\n",
       "  0.025248806044463692,\n",
       "  0.17115822543364234,\n",
       "  0.16119001255361506,\n",
       "  0.7783239174113588],\n",
       " [0.2891771664387453,\n",
       "  0.4585124557109625,\n",
       "  0.21926776166292178,\n",
       "  0.2420535379490469,\n",
       "  0.3943538223969533,\n",
       "  0.8332603133100343,\n",
       "  0.40128387190404163,\n",
       "  0.09059545065055925],\n",
       " [0.4269647561044342,\n",
       "  0.23142189874337402,\n",
       "  0.315218906524402,\n",
       "  0.3192299462128413,\n",
       "  0.30062930064319515,\n",
       "  0.6559993855216163,\n",
       "  0.252761106744557,\n",
       "  0.3110426966720018],\n",
       " [0.29003308462325483,\n",
       "  0.6631956230817858,\n",
       "  0.45063839642206127,\n",
       "  0.5248237950647122,\n",
       "  0.6279021824602028,\n",
       "  0.3489138592290672,\n",
       "  0.51303487576403,\n",
       "  0.981333738863993],\n",
       " [0.4226909612066514,\n",
       "  0.36321195298362186,\n",
       "  0.6262219300796229,\n",
       "  0.6116535564157719,\n",
       "  0.08833808887510004,\n",
       "  0.37744814448970815,\n",
       "  0.08027182352118807,\n",
       "  0.10945187682317566],\n",
       " [0.3232293467757808,\n",
       "  0.4700697496854206,\n",
       "  0.08080697126243049,\n",
       "  0.24374174915487412,\n",
       "  1.0668326175845007,\n",
       "  0.38762834347857655,\n",
       "  0.26864406253140855,\n",
       "  0.07908696525509228],\n",
       " [0.5964150018941207,\n",
       "  0.38484533310168856,\n",
       "  0.29255523049372023,\n",
       "  0.874059255689086,\n",
       "  0.27453254023712514,\n",
       "  0.7184656637080304,\n",
       "  0.3173892852090909,\n",
       "  0.7561110270025853],\n",
       " [0.22786375269654668,\n",
       "  0.4108742585770279,\n",
       "  0.6280676565787361,\n",
       "  0.11979260281106213,\n",
       "  0.1975538732083191,\n",
       "  -0.0581043651754992,\n",
       "  0.3047595774128107,\n",
       "  0.4093940846499532],\n",
       " [-0.0767052846091677,\n",
       "  0.5374057969552406,\n",
       "  0.40549329627984143,\n",
       "  0.25825785383973837,\n",
       "  0.18356434807080654,\n",
       "  0.12864524765111127,\n",
       "  0.19153026205311552,\n",
       "  0.2931677585280843],\n",
       " [0.14990285949309512,\n",
       "  0.5688998870570082,\n",
       "  0.5196634407681661,\n",
       "  0.14908276105437493,\n",
       "  0.2886701173842501,\n",
       "  0.43069510075346307,\n",
       "  0.2500073269251877,\n",
       "  0.22156091473702988],\n",
       " [0.3988001232164587,\n",
       "  0.40225207900973575,\n",
       "  0.5367037059702752,\n",
       "  0.2889746309147683,\n",
       "  0.24103242189452853,\n",
       "  0.12829930279140908,\n",
       "  0.1822449111492789,\n",
       "  0.8862121923741896],\n",
       " [0.6469949272316423,\n",
       "  0.3100375108249427,\n",
       "  0.20645691211066355,\n",
       "  0.18736380320373458,\n",
       "  0.5378077186654208,\n",
       "  0.10203123525892543,\n",
       "  0.1779314057837141,\n",
       "  0.3698320530179319],\n",
       " [0.48686994168895004,\n",
       "  0.18987726951233974,\n",
       "  0.32792086584520685,\n",
       "  0.4755396981164995,\n",
       "  0.046710491270188226,\n",
       "  0.24577763807568664,\n",
       "  0.18629679459954052,\n",
       "  -0.03493310134803331],\n",
       " [0.5815977727478113,\n",
       "  0.32613454922492524,\n",
       "  0.24400147805270322,\n",
       "  0.47044371737130225,\n",
       "  0.3674668382069299,\n",
       "  0.19332127057921655,\n",
       "  0.04835269499713248,\n",
       "  0.46236286721442826],\n",
       " [0.3575650985564828,\n",
       "  0.6528084640249856,\n",
       "  0.48233555434842423,\n",
       "  0.5521710178673408,\n",
       "  0.31509662283018136,\n",
       "  0.14460273166611534,\n",
       "  0.1484981209535177,\n",
       "  0.16663807327844915],\n",
       " [0.3303431323101956,\n",
       "  0.5677950420749138,\n",
       "  0.24149025172690675,\n",
       "  0.5866682834491663,\n",
       "  0.48933968355905755,\n",
       "  0.19987606410572456,\n",
       "  0.062085861501320144,\n",
       "  0.26907867597647495],\n",
       " [0.49552409970247846,\n",
       "  0.1267401046103039,\n",
       "  0.10233426523543132,\n",
       "  0.3978167483718795,\n",
       "  0.439770856272053,\n",
       "  0.20421677258188728,\n",
       "  0.44901502143447036,\n",
       "  0.3772189398080955],\n",
       " [0.20477421910205315,\n",
       "  0.3393885089874339,\n",
       "  0.28489753188213957,\n",
       "  0.42005767110823805,\n",
       "  0.42329294858401795,\n",
       "  0.18654911352791648,\n",
       "  0.44140478062300964,\n",
       "  0.3558595745766244],\n",
       " [-0.0857796705153539,\n",
       "  0.4305673659272944,\n",
       "  0.23571360140952735,\n",
       "  0.33839833509259165,\n",
       "  0.6993103108400313,\n",
       "  0.9691201490854916,\n",
       "  0.13595990812400963,\n",
       "  0.8918815461907684],\n",
       " [0.2551987185951985,\n",
       "  0.26067513037699686,\n",
       "  0.24405626527316523,\n",
       "  0.06598230579790261,\n",
       "  0.3840525586625265,\n",
       "  0.21061498647550608,\n",
       "  0.5602784009744995,\n",
       "  0.28823597303906373],\n",
       " [0.24560865134645332,\n",
       "  0.349770286030122,\n",
       "  0.2740888292180816,\n",
       "  0.36006808900897314,\n",
       "  0.4931320329245999,\n",
       "  0.25519583845579974,\n",
       "  0.19892185913218702,\n",
       "  0.24400691745484338],\n",
       " [0.39025941014269394,\n",
       "  0.18825255176863687,\n",
       "  0.3068162561076217,\n",
       "  0.6791976002116138,\n",
       "  0.42034230509597637,\n",
       "  0.25545562735797517,\n",
       "  0.20848471678097913,\n",
       "  0.38987025049009394],\n",
       " [0.18286757815650062,\n",
       "  0.241885190304175,\n",
       "  0.5475048839470624,\n",
       "  0.05829125356061407,\n",
       "  0.2280753872277808,\n",
       "  0.2762070972795209,\n",
       "  0.4162373931085456,\n",
       "  0.2540443381918032],\n",
       " [0.5906521484099478,\n",
       "  0.15364407026628935,\n",
       "  0.7224238286279809,\n",
       "  0.26821552145397937,\n",
       "  0.14137870057361546,\n",
       "  0.36189012180949764,\n",
       "  0.37954779539802946,\n",
       "  0.18825096906408714],\n",
       " [0.2670515260721475,\n",
       "  0.22513064434039054,\n",
       "  0.2095523338200863,\n",
       "  0.5711493285550552,\n",
       "  0.35370907975971877,\n",
       "  0.2685696487714353,\n",
       "  0.15217939402842662,\n",
       "  0.7393374009047347],\n",
       " [0.30422917215153844,\n",
       "  0.10356154589902813,\n",
       "  0.1796319599935164,\n",
       "  0.2975117270040327,\n",
       "  0.14928510295790806,\n",
       "  0.21302798466634487,\n",
       "  0.3623842534332634,\n",
       "  0.08101834262839513],\n",
       " [0.4535618550461705,\n",
       "  0.3155709553831044,\n",
       "  0.3743028842406837,\n",
       "  0.43196780684064634,\n",
       "  0.38728642850664063,\n",
       "  0.11291427286439643,\n",
       "  0.7141042267399819,\n",
       "  0.29134211655622233],\n",
       " [0.02144792978953171,\n",
       "  0.3131478987382821,\n",
       "  0.1738591474541682,\n",
       "  0.2130761232666506,\n",
       "  0.44460421642547576,\n",
       "  0.22499674314188156,\n",
       "  0.6912212080154526,\n",
       "  0.3308467889714925],\n",
       " [0.4114441085103523,\n",
       "  0.14783242187429155,\n",
       "  0.31552097657628525,\n",
       "  0.84068062851122,\n",
       "  0.3599825567642221,\n",
       "  0.04107830599347522,\n",
       "  0.5073123740921952,\n",
       "  0.11364602315347727],\n",
       " [0.6264551793975033,\n",
       "  0.03508030412558294,\n",
       "  0.3898667275575316,\n",
       "  0.22807476709371213,\n",
       "  0.2582855035242832,\n",
       "  0.9812302280486013,\n",
       "  0.26853068970325616,\n",
       "  -0.0691797704886399],\n",
       " [0.1467610611830712,\n",
       "  0.24343176938226938,\n",
       "  0.44507118482278724,\n",
       "  0.6290723387937852,\n",
       "  0.4465032273750268,\n",
       "  0.33799727106161115,\n",
       "  0.3969692413700391,\n",
       "  0.16425304442442457],\n",
       " [0.29753867762837466,\n",
       "  0.28798400378506867,\n",
       "  0.5622778089014216,\n",
       "  0.4212976201459525,\n",
       "  0.5426808481971014,\n",
       "  0.1542761266522493,\n",
       "  0.16145975173867078,\n",
       "  0.22367930060574734],\n",
       " [-0.022159281781465978,\n",
       "  0.38941657019838766,\n",
       "  0.4041871699153403,\n",
       "  0.36454680320184957,\n",
       "  0.29232082840566936,\n",
       "  0.14513755065063647,\n",
       "  0.49814042258326163,\n",
       "  0.4107252710048968],\n",
       " [0.35339132695864617,\n",
       "  0.0986821158663036,\n",
       "  0.30618867473904715,\n",
       "  0.2868913017012981,\n",
       "  0.3497863842892372,\n",
       "  0.39404208937552654,\n",
       "  0.5640801730200761,\n",
       "  0.09211671587229286],\n",
       " [0.11161531146332582,\n",
       "  0.2720657761301512,\n",
       "  0.34465561294550673,\n",
       "  0.4134909914524328,\n",
       "  0.3902654229967822,\n",
       "  0.09109765106601284,\n",
       "  0.27192605389321894,\n",
       "  0.1368001511386227],\n",
       " [0.5216066081338353,\n",
       "  0.1450448135942095,\n",
       "  0.13333299976705185,\n",
       "  0.6052251837197351,\n",
       "  0.6118207209226973,\n",
       "  -0.0009645655369199924,\n",
       "  0.8123816867087044,\n",
       "  0.6393563911384375],\n",
       " [0.7162365139802211,\n",
       "  0.5797922013935031,\n",
       "  0.2463321278725862,\n",
       "  0.5864894479106288,\n",
       "  0.17985662402304023,\n",
       "  0.47681264043984606,\n",
       "  0.6293466928500243,\n",
       "  0.5782468233407059],\n",
       " [0.3524066287730821,\n",
       "  0.2902794285278474,\n",
       "  0.46456145730852205,\n",
       "  0.16741454578229387,\n",
       "  0.45856107389502676,\n",
       "  0.3505333493396671,\n",
       "  0.567456243942375,\n",
       "  0.36622024583981816],\n",
       " [0.20452983127731708,\n",
       "  0.442229762065343,\n",
       "  0.3940186988300087,\n",
       "  0.5037141516074,\n",
       "  0.43077868060316993,\n",
       "  0.49744892063497714,\n",
       "  0.45992063664432026,\n",
       "  0.1952530812719051],\n",
       " [0.22466376083215966,\n",
       "  0.38233356810762087,\n",
       "  0.35424202239419555,\n",
       "  0.16300625045867895,\n",
       "  0.20645229069902468,\n",
       "  0.22429260597135833,\n",
       "  0.2555268515720773,\n",
       "  0.4308470056162532],\n",
       " [0.19429097061618855,\n",
       "  0.7302064285184717,\n",
       "  0.2695499327933599,\n",
       "  0.21739365455347537,\n",
       "  0.08527018500984918,\n",
       "  0.2943263048047022,\n",
       "  0.09251927905384827,\n",
       "  -0.04707164721315376],\n",
       " [0.2784480497343935,\n",
       "  0.2002138178613355,\n",
       "  0.5430643637746285,\n",
       "  0.4696694865685325,\n",
       "  0.2869443528911349,\n",
       "  0.21953834365682,\n",
       "  0.6457821290840329,\n",
       "  0.3174821680072599],\n",
       " [0.10046029975467766,\n",
       "  0.41734073152773465,\n",
       "  0.1526436423794953,\n",
       "  0.1358134401903732,\n",
       "  0.3191939556021837,\n",
       "  0.39342366632196835,\n",
       "  0.4672849600004396,\n",
       "  0.3646722035633528],\n",
       " [0.27064814115757757,\n",
       "  0.13338384157112912,\n",
       "  0.06751372201290717,\n",
       "  0.300450717204531,\n",
       "  1.067082904251773,\n",
       "  0.19000414431832346,\n",
       "  0.2372530619331808,\n",
       "  0.1458725620393552],\n",
       " [0.1558805501364577,\n",
       "  0.2686006775473537,\n",
       "  0.524877420244642,\n",
       "  0.29323801073295724,\n",
       "  0.15365120135100535,\n",
       "  0.43244417491694886,\n",
       "  0.43605194729308344,\n",
       "  0.2018619476473199],\n",
       " [0.23137331060949956,\n",
       "  0.19612761997883432,\n",
       "  0.1026512691031542,\n",
       "  0.5025390010686012,\n",
       "  0.16079177999868516,\n",
       "  0.19125438348061352,\n",
       "  0.6664987636200105,\n",
       "  0.17921500033952192],\n",
       " [0.47982668670177886,\n",
       "  0.5677020878647053,\n",
       "  0.2765547414863981,\n",
       "  0.12121674578816632,\n",
       "  0.12331607574601025,\n",
       "  0.42194598658294047,\n",
       "  0.10282499139030365,\n",
       "  0.7854457826851351],\n",
       " [0.43627941957181726,\n",
       "  0.278048620723323,\n",
       "  0.5456001179412518,\n",
       "  0.4445588212618171,\n",
       "  0.21925409826861686,\n",
       "  0.08049089489322339,\n",
       "  0.1441719586543568,\n",
       "  0.2692792259681086],\n",
       " [0.4391653362358617,\n",
       "  0.3260641172004144,\n",
       "  0.38092013570302186,\n",
       "  0.21183684636338918,\n",
       "  0.08754567430718972,\n",
       "  0.0988532151424783,\n",
       "  0.4286720001746569,\n",
       "  0.2766179057682152],\n",
       " [0.3392797076982453,\n",
       "  0.30710693507184494,\n",
       "  0.3406169420840759,\n",
       "  0.22942084958445605,\n",
       "  0.30556448645434425,\n",
       "  0.42986402407443464,\n",
       "  0.29592013005278445,\n",
       "  0.13167931343670927],\n",
       " [0.3694936606245845,\n",
       "  0.3303379970068699,\n",
       "  0.05802544901569389,\n",
       "  0.34433298593640727,\n",
       "  0.4774142317866401,\n",
       "  0.19435961833019588,\n",
       "  0.6191573911391686,\n",
       "  0.4967463183970271],\n",
       " [0.3713183854341314,\n",
       "  0.5987782711321187,\n",
       "  0.4497882301288666,\n",
       "  0.24153159763346477,\n",
       "  0.4733696036241477,\n",
       "  0.28155160442219557,\n",
       "  0.5440574953107301,\n",
       "  0.48928257725035923],\n",
       " [0.5643353413085337,\n",
       "  0.17150154062690293,\n",
       "  0.5859389656427688,\n",
       "  0.3121330384604055,\n",
       "  0.15326055255871185,\n",
       "  0.09033168683670215,\n",
       "  0.7035904229662503,\n",
       "  1.1381792510629296],\n",
       " [0.1683412398993912,\n",
       "  0.18102032490352837,\n",
       "  0.17918511500339435,\n",
       "  0.4758776535277127,\n",
       "  0.28023665438136564,\n",
       "  0.053331829257453056,\n",
       "  0.4600861054122337,\n",
       "  0.311295577712148],\n",
       " [0.33084632949535264,\n",
       "  0.26412063508182826,\n",
       "  0.6703635091088475,\n",
       "  0.45589595891534596,\n",
       "  0.15247632901431227,\n",
       "  0.3853255204107538,\n",
       "  0.7104409716278969,\n",
       "  0.17183505833630874],\n",
       " [0.5237443099865204,\n",
       "  0.30868355092683875,\n",
       "  0.1781295043877672,\n",
       "  0.5525089873998807,\n",
       "  0.3644789271480044,\n",
       "  0.4848778288914223,\n",
       "  0.32699586336345443,\n",
       "  0.10141336715744856],\n",
       " [0.39224681660090854,\n",
       "  0.39023718884475533,\n",
       "  0.021395230004717637,\n",
       "  0.31858249298043717,\n",
       "  0.45139971871826723,\n",
       "  0.20949527318977734,\n",
       "  0.25296297420415587,\n",
       "  0.23400835673156278],\n",
       " [0.1680762087591209,\n",
       "  0.5977852575951753,\n",
       "  0.3560648816074668,\n",
       "  0.07579256204224682,\n",
       "  0.9625817384760641,\n",
       "  0.6288736278701648,\n",
       "  0.40970505777653,\n",
       "  0.25379306271910423],\n",
       " [0.3012853798731827,\n",
       "  0.2766395847767771,\n",
       "  -0.006650385542635079,\n",
       "  0.27287273799628725,\n",
       "  0.4578748012419587,\n",
       "  0.2085009313666073,\n",
       "  0.2494415924902465,\n",
       "  0.35417522572721993],\n",
       " [0.3118919783488847,\n",
       "  0.7470386456040622,\n",
       "  0.25997017859412636,\n",
       "  0.26656389060122043,\n",
       "  0.4929446530761519,\n",
       "  0.4227139821011681,\n",
       "  0.35706251880153655,\n",
       "  0.018608169187965106],\n",
       " [0.3186331651931389,\n",
       "  0.1715940954663821,\n",
       "  0.08036331253498909,\n",
       "  0.27986437858978425,\n",
       "  0.27670879301444956,\n",
       "  0.40858835742887245,\n",
       "  0.38973686807035607,\n",
       "  0.4753979580534058],\n",
       " [0.21502482996660421,\n",
       "  0.6195484444823456,\n",
       "  0.299863640555306,\n",
       "  0.39397172094233873,\n",
       "  0.2426825925514864,\n",
       "  0.1367048179386142,\n",
       "  0.5238498497923908,\n",
       "  0.15190631799324733],\n",
       " [0.29606875888362705,\n",
       "  0.12949422641429884,\n",
       "  0.7090856944463225,\n",
       "  0.27157743277111296,\n",
       "  1.0496780300478883,\n",
       "  0.3316277220318695,\n",
       "  0.23193338466326133,\n",
       "  0.049556637516357964],\n",
       " [0.2678720185374613,\n",
       "  0.006013649976952829,\n",
       "  0.5070700371934138,\n",
       "  0.28109613131342126,\n",
       "  0.48724851211422254,\n",
       "  0.6787402444865813,\n",
       "  0.22923522514081804,\n",
       "  0.2588227669549327],\n",
       " [0.6487628491435018,\n",
       "  0.40528537796745995,\n",
       "  0.3218671563951505,\n",
       "  0.2011911874068246,\n",
       "  0.61786625914629,\n",
       "  0.5012414913953798,\n",
       "  0.3434087911023447,\n",
       "  0.4000591101437966],\n",
       " [0.46907453306580954,\n",
       "  0.4054479892277163,\n",
       "  0.07287440136048072,\n",
       "  0.2840781025210381,\n",
       "  0.07823872415042653,\n",
       "  0.10827858245946541,\n",
       "  0.2895982635294481,\n",
       "  0.3166322214528127],\n",
       " [0.12053681931940728,\n",
       "  0.4581916387269833,\n",
       "  0.45460921313304026,\n",
       "  0.41146116978930486,\n",
       "  0.21242228347671419,\n",
       "  0.32643939058457905,\n",
       "  0.6402757960093759,\n",
       "  0.2784204929546972],\n",
       " [0.28247252052688127,\n",
       "  0.5469504438887917,\n",
       "  0.35413835419769873,\n",
       "  0.16095370929555033,\n",
       "  0.2993916569291703,\n",
       "  0.1901191583103397,\n",
       "  0.6216366717302291,\n",
       "  0.6974149000363342],\n",
       " [0.13888009746807664,\n",
       "  0.4301503880675894,\n",
       "  0.6302156521866193,\n",
       "  0.19687544247988328,\n",
       "  0.2640381173260062,\n",
       "  0.4666318817824549,\n",
       "  0.04097594890270084,\n",
       "  0.03242316979677974],\n",
       " [0.23702766056539437,\n",
       "  0.25923851413871063,\n",
       "  0.12788777640571014,\n",
       "  0.3495872982807735,\n",
       "  0.5625019978902056,\n",
       "  0.6196506601780984,\n",
       "  0.5696650860452703,\n",
       "  0.16471923335237337],\n",
       " [0.5880444848209011,\n",
       "  0.17940254771799127,\n",
       "  0.32949676412619583,\n",
       "  0.45635655358674154,\n",
       "  0.1134548557063878,\n",
       "  0.2219127069127513,\n",
       "  0.5929449903085369,\n",
       "  0.38874655418948767],\n",
       " [0.30983670956278064,\n",
       "  0.15796093389545257,\n",
       "  0.18230274479991304,\n",
       "  0.4343213836007562,\n",
       "  0.32828710438444486,\n",
       "  0.21948461331211738,\n",
       "  0.40811351765887455,\n",
       "  0.5135321625327597],\n",
       " [0.3045811768790314,\n",
       "  0.5069304986296003,\n",
       "  0.5254563568032039,\n",
       "  0.41190015107266853,\n",
       "  0.28558208240579724,\n",
       "  0.2519245516561967,\n",
       "  0.6461001197306842,\n",
       "  0.17689497506123195],\n",
       " [0.33212584476294815,\n",
       "  0.17938893634748104,\n",
       "  0.7266274918639628,\n",
       "  0.4102875554125735,\n",
       "  0.30244776458133543,\n",
       "  0.0849764800446432,\n",
       "  0.45783453794185036,\n",
       "  0.46035460196284106],\n",
       " [0.1680273716902077,\n",
       "  -0.03112156382489999,\n",
       "  0.19501666409112664,\n",
       "  0.4515139550755418,\n",
       "  0.3826620315606964,\n",
       "  0.4113842838919272,\n",
       "  0.4229920866966643,\n",
       "  0.37457547487553455],\n",
       " [0.5448336291719199,\n",
       "  0.1270978945463527,\n",
       "  0.47554623843587396,\n",
       "  0.45447650637646336,\n",
       "  -0.018242531857625668,\n",
       "  0.49864755138354,\n",
       "  0.2754147464434773,\n",
       "  0.39393845359494545],\n",
       " [0.26277877474686295,\n",
       "  0.16559874155208676,\n",
       "  0.3421301246239666,\n",
       "  0.7402436841479998,\n",
       "  0.1347108653348546,\n",
       "  0.4112209123108471,\n",
       "  0.16253808735595746,\n",
       "  0.08081604467414122],\n",
       " [0.32679114580595126,\n",
       "  0.3815487453896392,\n",
       "  0.12764843871880946,\n",
       "  0.2286538118552699,\n",
       "  0.3175439261447333,\n",
       "  0.3549333473998546,\n",
       "  0.6195454014825442,\n",
       "  0.5650341335735932],\n",
       " [0.2587086249382554,\n",
       "  0.1261060874467512,\n",
       "  0.1663241081811347,\n",
       "  0.6003764728741118,\n",
       "  0.30727442954881945,\n",
       "  0.26503829613019175,\n",
       "  0.4211607517874003,\n",
       "  0.2937981717181432],\n",
       " [0.6718256747653554,\n",
       "  0.41093772522829525,\n",
       "  0.3190935313506722,\n",
       "  0.5221539965448179,\n",
       "  0.5703260319070707,\n",
       "  0.4047040020920345,\n",
       "  0.3346873976553114,\n",
       "  0.3089560782025279],\n",
       " [0.08096861734487285,\n",
       "  0.39607039485499296,\n",
       "  0.5174543612001349,\n",
       "  0.43160372907987754,\n",
       "  0.24298832001392362,\n",
       "  0.06968683393170615,\n",
       "  0.3913035969556294,\n",
       "  0.08939283320655854],\n",
       " [0.13887294070023296,\n",
       "  0.4517241828967546,\n",
       "  0.3098262188727365,\n",
       "  0.41721953168138515,\n",
       "  0.38607461921358727,\n",
       "  0.18156004850639934,\n",
       "  0.3514019833089947,\n",
       "  0.5668413787712997],\n",
       " [0.11290517539143792,\n",
       "  0.435525636507875,\n",
       "  0.27947128849479147,\n",
       "  0.4324650438122028,\n",
       "  0.23855139044050994,\n",
       "  0.1695034206774552,\n",
       "  1.0891883493480574,\n",
       "  0.08748946770586659],\n",
       " [0.5784907490720474,\n",
       "  0.3399133065307154,\n",
       "  0.496659125713752,\n",
       "  0.45912861833724766,\n",
       "  0.40042802603539274,\n",
       "  0.3840161499522888,\n",
       "  0.23638732845126664,\n",
       "  0.3657451189819428],\n",
       " [0.32301380343400266,\n",
       "  0.4201715984523178,\n",
       "  0.3367415599088761,\n",
       "  0.35326293634423855,\n",
       "  0.39046189517024943,\n",
       "  0.33204834629911734,\n",
       "  0.4070132751527193,\n",
       "  0.24259591295948232],\n",
       " [0.2357696662464396,\n",
       "  0.47748720290094,\n",
       "  0.07268131671462814,\n",
       "  0.5175076298693565,\n",
       "  0.11513195264797274,\n",
       "  0.4052844280122064,\n",
       "  0.4115451018205556,\n",
       "  0.1607269830855169],\n",
       " [0.22337739708052548,\n",
       "  0.5400956995349385,\n",
       "  0.3028976777582756,\n",
       "  0.23758530462619146,\n",
       "  -0.020238825710665,\n",
       "  0.24498910067730165,\n",
       "  0.23934294725547167,\n",
       "  0.17312863378559368],\n",
       " [0.4959165861485785,\n",
       "  0.6441266168364352,\n",
       "  0.345047633870207,\n",
       "  0.2472853804651679,\n",
       "  0.9017074987688858,\n",
       "  0.33611810704561373,\n",
       "  0.3333223961601457,\n",
       "  0.39071972849907327],\n",
       " [0.24076872999289486,\n",
       "  0.3921491087846466,\n",
       "  0.38109704911494635,\n",
       "  0.3340187017755075,\n",
       "  0.3094167286521812,\n",
       "  0.5321971104110504,\n",
       "  0.18417120748658536,\n",
       "  0.522929498644745],\n",
       " [0.32252649130644145,\n",
       "  0.0742421472797248,\n",
       "  0.23101293998537573,\n",
       "  0.31339900492037887,\n",
       "  0.24796623862736833,\n",
       "  0.14557410313999353,\n",
       "  0.1904389836453662,\n",
       "  0.19599612174901315],\n",
       " [0.48302786025419336,\n",
       "  0.46345950292636906,\n",
       "  0.11522425324440398,\n",
       "  0.2520514043132863,\n",
       "  0.4143280113501973,\n",
       "  0.3219447962117943,\n",
       "  0.30937694049892056,\n",
       "  0.478939145504208],\n",
       " [0.17874856431093966,\n",
       "  0.2908847627974975,\n",
       "  0.5266481911748223,\n",
       "  0.4587005380426272,\n",
       "  0.18637860378929924,\n",
       "  -0.08187556244625063,\n",
       "  1.054512089953769,\n",
       "  0.547422846443938],\n",
       " [0.34332173891277656,\n",
       "  0.23221625012474034,\n",
       "  0.3779500096130438,\n",
       "  0.48954363565266945,\n",
       "  0.349401723182276,\n",
       "  0.09944646091522591,\n",
       "  0.22908661207898345,\n",
       "  0.4298160011432403],\n",
       " [0.4955151963323612,\n",
       "  0.795382292564871,\n",
       "  0.17271931111696626,\n",
       "  0.3564850008598386,\n",
       "  0.5500123219085687,\n",
       "  0.3399632409853639,\n",
       "  0.26401035791684924,\n",
       "  0.2305925866145193],\n",
       " [0.8669248853001577,\n",
       "  0.031886222418676224,\n",
       "  0.24764244611495206,\n",
       "  0.20306519271302048,\n",
       "  0.22182456700711015,\n",
       "  0.4048327412531517,\n",
       "  0.552523600309526,\n",
       "  -0.016590579717913313],\n",
       " [0.13731218190394162,\n",
       "  0.4549533961594815,\n",
       "  0.11100324919014304,\n",
       "  0.5352630091472615,\n",
       "  0.3568110479961598,\n",
       "  0.4321304046051393,\n",
       "  0.3925653646853723,\n",
       "  0.2253204064790394],\n",
       " [0.7686638811933529,\n",
       "  0.24289930917997868,\n",
       "  0.19553638884833935,\n",
       "  0.301076201179509,\n",
       "  0.129601087784197,\n",
       "  0.3116092066621284,\n",
       "  0.08084748268211804,\n",
       "  0.34045720989270317],\n",
       " [0.3014547392099961,\n",
       "  0.29688181554837095,\n",
       "  0.5176366047318398,\n",
       "  0.705860197062052,\n",
       "  0.6944223932213851,\n",
       "  0.309826433041095,\n",
       "  1.063307253425031,\n",
       "  0.29124493682842134],\n",
       " [0.38874576518149656,\n",
       "  0.2183640423949564,\n",
       "  0.40333783017804514,\n",
       "  0.1956758570130987,\n",
       "  0.11328101490751835,\n",
       "  0.48852882276964565,\n",
       "  0.20218911810521317,\n",
       "  0.008113260311039783],\n",
       " [0.2634444355342941,\n",
       "  0.5309668196455979,\n",
       "  0.07584896593314557,\n",
       "  0.2641816230465674,\n",
       "  0.32712105769601774,\n",
       "  0.4932293132613838,\n",
       "  0.2585572166614486,\n",
       "  0.4214998151097544],\n",
       " [0.2832295524819674,\n",
       "  0.18570189193069597,\n",
       "  0.6730088203983959,\n",
       "  0.15343633564835468,\n",
       "  0.34371825710125,\n",
       "  0.15355026311698425,\n",
       "  0.4499323632697941,\n",
       "  0.07712415934348593],\n",
       " [0.3101400864791142,\n",
       "  0.06260455448348541,\n",
       "  0.6499006667950157,\n",
       "  0.2714755514295196,\n",
       "  0.3056614775978696,\n",
       "  0.4580113007535306,\n",
       "  0.3553641634328597,\n",
       "  0.3780926262539077],\n",
       " [0.49542870012102935,\n",
       "  0.28783774344206137,\n",
       "  0.3134325190585043,\n",
       "  0.5566135190604139,\n",
       "  0.26559618107751004,\n",
       "  0.10537077623665819,\n",
       "  -0.06663301084390887,\n",
       "  0.17205133450953],\n",
       " [0.6938228524679583,\n",
       "  0.38698527366669194,\n",
       "  0.37356501505715844,\n",
       "  0.4349027721541072,\n",
       "  0.13726608187056555,\n",
       "  0.05304698895366581,\n",
       "  0.13971424248545866,\n",
       "  0.4047108807782429],\n",
       " [0.3939757182677497,\n",
       "  0.20742354742950392,\n",
       "  0.7430117671811423,\n",
       "  -0.055149564967374776,\n",
       "  0.16380351284219996,\n",
       "  -0.030168257669431098,\n",
       "  0.23959283407368695,\n",
       "  0.08032696291629114],\n",
       " [0.32804255145975647,\n",
       "  0.19693898281311653,\n",
       "  0.31169008879322846,\n",
       "  0.06623863063564128,\n",
       "  0.21882820289156976,\n",
       "  0.39339466817503005,\n",
       "  0.4917652048314245,\n",
       "  0.40757275356669753],\n",
       " [0.2704353310172401,\n",
       "  0.2214408590988165,\n",
       "  0.0675817733683411,\n",
       "  0.41953579587742296,\n",
       "  0.040912980462970455,\n",
       "  0.07324175219649114,\n",
       "  0.4598665089988607,\n",
       "  0.027814942089260566],\n",
       " [0.2389976464935919,\n",
       "  -0.02555747688336605,\n",
       "  0.2641325463990369,\n",
       "  0.48089833933218334,\n",
       "  0.5065920360394545,\n",
       "  0.21770526781631105,\n",
       "  0.14789775567816574,\n",
       "  0.8127328631705899],\n",
       " [0.24215529197091776,\n",
       "  0.2271577416321566,\n",
       "  0.3787571788021,\n",
       "  0.6179248539478334,\n",
       "  0.5797096260973259,\n",
       "  0.49885185189615416,\n",
       "  0.30001776035256,\n",
       "  0.19337689231267974],\n",
       " [0.3891607672265467,\n",
       "  0.2774642420602711,\n",
       "  0.5161246457061079,\n",
       "  0.36148343991855225,\n",
       "  0.4698249102061537,\n",
       "  0.34510347004407793,\n",
       "  0.47000827067768647,\n",
       "  0.36785815977449526],\n",
       " [0.3282897216707291,\n",
       "  0.424165746102783,\n",
       "  0.2730717456686977,\n",
       "  0.31065812761051487,\n",
       "  0.3996614858936606,\n",
       "  0.6700877252575564,\n",
       "  0.47154421738461816,\n",
       "  0.2471654945643853],\n",
       " [0.4083853456637034,\n",
       "  0.008992567859261757,\n",
       "  0.5181912336832857,\n",
       "  0.14536927338307032,\n",
       "  0.21685626871069155,\n",
       "  0.24453300734045902,\n",
       "  0.15479213918319332,\n",
       "  0.17810290110416158],\n",
       " [0.38108059421848006,\n",
       "  0.18381911848703247,\n",
       "  0.2636003395820559,\n",
       "  0.10067884129034838,\n",
       "  0.2373825986468297,\n",
       "  0.36415349631719246,\n",
       "  0.5385864614140607,\n",
       "  0.26661627349311273],\n",
       " [0.43855683262089473,\n",
       "  0.25672645578342124,\n",
       "  0.4503992582953493,\n",
       "  0.4824496714220051,\n",
       "  0.20697699208062073,\n",
       "  0.5237768861438633,\n",
       "  -0.05350438580064664,\n",
       "  0.22998728308442679],\n",
       " [0.22437798900396572,\n",
       "  0.6267343360511024,\n",
       "  0.05122688434344483,\n",
       "  0.6048266771960225,\n",
       "  0.034056850464646196,\n",
       "  0.3315065129138526,\n",
       "  0.1447873278722447,\n",
       "  0.5109831145053727],\n",
       " [0.5015402263447551,\n",
       "  0.2674778295438915,\n",
       "  0.4361514950654036,\n",
       "  0.4916938310239298,\n",
       "  0.14977252175030184,\n",
       "  0.1596373048558185,\n",
       "  0.41821311898683367,\n",
       "  0.27420950456810894],\n",
       " [0.7637520507005855,\n",
       "  0.4838828810655178,\n",
       "  0.14756057507861212,\n",
       "  0.20522448079827013,\n",
       "  0.4510887305586181,\n",
       "  0.5007995284142251,\n",
       "  0.5410724622100122,\n",
       "  0.4895194668483811],\n",
       " [0.12805219038412372,\n",
       "  0.4592060233947315,\n",
       "  0.12350814039445364,\n",
       "  0.6873033377991689,\n",
       "  0.5574579099948437,\n",
       "  0.27878546657285974,\n",
       "  0.22911435024152588,\n",
       "  0.37650641562619397],\n",
       " [0.13228323686904503,\n",
       "  0.1664843307654049,\n",
       "  0.33616554871374715,\n",
       "  0.5130852392758166,\n",
       "  0.2754448208828694,\n",
       "  0.2852075553196902,\n",
       "  0.15418454314173605,\n",
       "  0.23514704596841787],\n",
       " [0.5047266647265699,\n",
       "  0.33051763850221727,\n",
       "  0.2490874153730589,\n",
       "  0.32555184816855554,\n",
       "  0.15958031033324804,\n",
       "  0.5986481292579915,\n",
       "  0.16353628050966226,\n",
       "  0.14066927434814191],\n",
       " [0.34645995931613494,\n",
       "  0.44425395634148945,\n",
       "  0.2100127056030665,\n",
       "  0.31297474336872605,\n",
       "  0.5996183484997202,\n",
       "  1.4232933554376235,\n",
       "  0.27710148632860443,\n",
       "  0.41767267333626634],\n",
       " [0.12425053752088966,\n",
       "  0.1960067080087519,\n",
       "  0.14046175649841428,\n",
       "  -0.006567107937626471,\n",
       "  0.31375659056815786,\n",
       "  0.3190865586966596,\n",
       "  0.16272477629710166,\n",
       "  0.5238749474765306],\n",
       " [0.3281618476263572,\n",
       "  0.7440701466762939,\n",
       "  0.32597682301389264,\n",
       "  0.3298060528302223,\n",
       "  0.7706224238396753,\n",
       "  0.44124401570925365,\n",
       "  0.3551640035166985,\n",
       "  0.5463263160163297],\n",
       " [0.17307885219162475,\n",
       "  0.5528302079632431,\n",
       "  0.3530100860334331,\n",
       "  0.6041351847571582,\n",
       "  0.26017677067007244,\n",
       "  0.12944097591738868,\n",
       "  0.06098839130922439,\n",
       "  0.3717145961403172],\n",
       " [0.12032186437693806,\n",
       "  0.3770947148533434,\n",
       "  0.7695350605539256,\n",
       "  0.14826000988412474,\n",
       "  0.28078369018531196,\n",
       "  0.32065844465791915,\n",
       "  0.05437116523214885,\n",
       "  0.712749004979974],\n",
       " [0.3124267066737034,\n",
       "  0.48981890273661677,\n",
       "  0.8074316805925735,\n",
       "  0.2883284636359423,\n",
       "  0.5309884215283048,\n",
       "  0.46709727640077503,\n",
       "  0.2766304432442952,\n",
       "  0.40219062885473056],\n",
       " [0.39228138225765236,\n",
       "  0.36067732828795995,\n",
       "  0.3841646295594863,\n",
       "  0.3873316658222803,\n",
       "  0.34694207182617764,\n",
       "  0.23977288834240637,\n",
       "  0.27197160870456805,\n",
       "  0.5321374896300357],\n",
       " [0.3743207596527745,\n",
       "  0.17797471232765028,\n",
       "  0.51678603235346,\n",
       "  0.3292040114885536,\n",
       "  0.3892646699603184,\n",
       "  0.3222617136508243,\n",
       "  0.2445195890316866,\n",
       "  0.6089849852817961],\n",
       " [0.2923757926442173,\n",
       "  0.1675576608215554,\n",
       "  0.6138856126452705,\n",
       "  0.3200758721916005,\n",
       "  0.3164445754861595,\n",
       "  0.2624639473146895,\n",
       "  0.4136325482442275,\n",
       "  0.6887224094965414],\n",
       " [0.2818485060802856,\n",
       "  0.4306840550613121,\n",
       "  0.2829544973969767,\n",
       "  0.4547985337913639,\n",
       "  0.6836732184679919,\n",
       "  0.5369578078745997,\n",
       "  0.16710585265635292,\n",
       "  -0.03872074906787111],\n",
       " [0.10878897430605644,\n",
       "  0.5078746949027685,\n",
       "  0.44338020139235224,\n",
       "  0.21038119856274592,\n",
       "  0.2927994877016851,\n",
       "  0.7604865687248371,\n",
       "  0.34138831268314096,\n",
       "  0.14195072706863918],\n",
       " [0.2182827974880407,\n",
       "  0.4486725444891441,\n",
       "  0.052345172710039634,\n",
       "  0.429119171556627,\n",
       "  0.46387302227149513,\n",
       "  0.11048305737147131,\n",
       "  0.4387331367696898,\n",
       "  0.2680855382863855],\n",
       " [0.2077672924141389,\n",
       "  0.23394657368479468,\n",
       "  0.3776596461617696,\n",
       "  0.34460649705368757,\n",
       "  0.26580742525863815,\n",
       "  0.1639732457377901,\n",
       "  0.4259734166025113,\n",
       "  0.5034600403455468],\n",
       " [0.2366548647748545,\n",
       "  0.08427302387225702,\n",
       "  0.22833065997066151,\n",
       "  0.4246151490402884,\n",
       "  0.1612463256579332,\n",
       "  0.31044215705776407,\n",
       "  0.4300124728982675,\n",
       "  0.2782963347251863],\n",
       " [0.15223786247697224,\n",
       "  0.46971435927390265,\n",
       "  0.4246159679814359,\n",
       "  0.7071524461085249,\n",
       "  0.22855218735606775,\n",
       "  0.5146256330183947,\n",
       "  0.051030968253916204,\n",
       "  0.26378555326581615],\n",
       " [0.338901276424328,\n",
       "  0.3444125535366205,\n",
       "  0.22472491319650445,\n",
       "  0.7497663030651718,\n",
       "  0.46366774830533064,\n",
       "  0.26353881983629607,\n",
       "  -0.07001712352836838,\n",
       "  0.12743753832010532],\n",
       " [0.10295881356308612,\n",
       "  0.29260049467168175,\n",
       "  0.2990958588053156,\n",
       "  0.42037504881714227,\n",
       "  0.35839960466414084,\n",
       "  0.2164702893385896,\n",
       "  0.7286116555048766,\n",
       "  0.09733300590256883],\n",
       " [0.22086017089730767,\n",
       "  0.321663802520291,\n",
       "  0.640948051512438,\n",
       "  0.573346736346629,\n",
       "  0.32198291390488776,\n",
       "  0.09678304556235565,\n",
       "  0.15098487226394558,\n",
       "  0.23562895679500626],\n",
       " [0.23412568403920636,\n",
       "  0.09805537302215327,\n",
       "  0.2454650298752275,\n",
       "  0.3478618874219064,\n",
       "  0.5609090332911787,\n",
       "  0.560505516223885,\n",
       "  0.3631937121237996,\n",
       "  0.7518356667509619],\n",
       " [0.10819016956245364,\n",
       "  -0.10992475749069862,\n",
       "  0.8434021426516893,\n",
       "  0.19128853116003025,\n",
       "  0.18572686493146354,\n",
       "  0.3551422905309589,\n",
       "  0.21584328662654492,\n",
       "  0.13422589881047695],\n",
       " [0.5955033895738889,\n",
       "  0.22391317632935268,\n",
       "  0.5855904928293718,\n",
       "  0.31913212963824933,\n",
       "  0.3437925667458801,\n",
       "  0.21971286168483162,\n",
       "  0.1492606676104031,\n",
       "  0.3616754504049249],\n",
       " [0.2653393938735686,\n",
       "  0.017093718600693943,\n",
       "  0.40750669085741437,\n",
       "  0.2678808003456288,\n",
       "  0.1335563387462287,\n",
       "  0.2737243313948145,\n",
       "  0.3000475716894191,\n",
       "  0.16950712169409443],\n",
       " [0.38475769880995125,\n",
       "  0.5391179887066271,\n",
       "  0.2729467209504865,\n",
       "  0.5137127958530845,\n",
       "  0.17212634327591309,\n",
       "  0.5429515051416461,\n",
       "  0.4146353268210214,\n",
       "  0.1253332127765376],\n",
       " [0.3962503161932168,\n",
       "  0.37268394437614305,\n",
       "  0.26842074625947904,\n",
       "  0.35656292702159387,\n",
       "  0.3465374594349876,\n",
       "  0.4391354874065811,\n",
       "  0.25457930613262203,\n",
       "  0.8955728022728027],\n",
       " [0.21219203168177897,\n",
       "  0.3464331111735909,\n",
       "  0.3953884872053366,\n",
       "  0.1553338240345437,\n",
       "  0.1800594650397354,\n",
       "  0.32983995318896875,\n",
       "  0.14084320667166617,\n",
       "  0.14861890082810816],\n",
       " [0.08179279108399885,\n",
       "  0.36051903547286596,\n",
       "  0.07305325961441163,\n",
       "  0.2769824387049868,\n",
       "  0.20527817705153267,\n",
       "  0.442980612750016,\n",
       "  0.2664274648176494,\n",
       "  0.15776497075595108],\n",
       " [0.1372005207038829,\n",
       "  0.3323420622704943,\n",
       "  0.5110502947417525,\n",
       "  0.17942689313600083,\n",
       "  0.25135882937169124,\n",
       "  0.6809476387481045,\n",
       "  0.2983221058462042,\n",
       "  0.3887470096222959],\n",
       " [0.21247939828253035,\n",
       "  0.40281600437556836,\n",
       "  0.5513341704275815,\n",
       "  0.1668262889327468,\n",
       "  0.1219645539585012,\n",
       "  0.33633985341959216,\n",
       "  0.3214399927709147,\n",
       "  0.4370266426145677],\n",
       " [0.476202530372726,\n",
       "  0.687577704258405,\n",
       "  0.48958499323578003,\n",
       "  0.23592739203619806,\n",
       "  0.5128829222937114,\n",
       "  0.11891651962491182,\n",
       "  0.22985983706805327,\n",
       "  0.11067177386770127],\n",
       " [0.37447491631597063,\n",
       "  0.40556420676576094,\n",
       "  0.9597771889602493,\n",
       "  0.4347950905680709,\n",
       "  0.2887856512235243,\n",
       "  0.07992741429335365,\n",
       "  0.3476834365509022,\n",
       "  0.16663629054337795],\n",
       " [0.16261375633834194,\n",
       "  0.49261833636783053,\n",
       "  0.11618667507991227,\n",
       "  0.3817146655534283,\n",
       "  0.13936105445626787,\n",
       "  0.42156262568764835,\n",
       "  0.21840835922450183,\n",
       "  0.2849620510978743],\n",
       " [0.045992149491708295,\n",
       "  0.3144105439193052,\n",
       "  0.39648922573391654,\n",
       "  0.2996999656404441,\n",
       "  0.38475968534610683,\n",
       "  0.34105161778949233,\n",
       "  0.39727202171577114,\n",
       "  0.260380777951033],\n",
       " [0.2906853243688978,\n",
       "  0.5851624314131676,\n",
       "  0.3191595118394869,\n",
       "  0.298274259063914,\n",
       "  0.0893215879912963,\n",
       "  0.0441272646473075,\n",
       "  0.1744948307850541,\n",
       "  0.20247593928453536],\n",
       " [0.3277274124898225,\n",
       "  0.15347548292450744,\n",
       "  0.393405479524405,\n",
       "  0.24696307915137103,\n",
       "  0.16112274499871015,\n",
       "  0.07945943725022804,\n",
       "  0.2670641734714514,\n",
       "  0.46124579551745476],\n",
       " [0.3658926240757369,\n",
       "  0.13719923474023474,\n",
       "  0.5148732310736552,\n",
       "  0.4763172676498039,\n",
       "  0.8431913076943988,\n",
       "  0.5059838601289831,\n",
       "  0.062268207206219485,\n",
       "  0.5208396849193394],\n",
       " [0.9146144955308357,\n",
       "  0.45595103258790365,\n",
       "  0.6023961895963434,\n",
       "  0.21515262874141233,\n",
       "  0.04512775712631978,\n",
       "  0.12758504458802614,\n",
       "  0.423908937016697,\n",
       "  0.34356708606522707],\n",
       " [0.2056194398745208,\n",
       "  0.3905215999026419,\n",
       "  0.425053737225994,\n",
       "  0.5531744383528601,\n",
       "  0.30513697263205636,\n",
       "  0.829987041408408,\n",
       "  0.18088393914071782,\n",
       "  0.3445424790670634],\n",
       " [0.374660728267403,\n",
       "  0.13812243859145396,\n",
       "  0.7185914414907231,\n",
       "  0.393288023231717,\n",
       "  0.7896201969901122,\n",
       "  0.09410613701109578,\n",
       "  0.2074446571186874,\n",
       "  0.9264843262811061],\n",
       " [0.6725137379089052,\n",
       "  0.300246415475646,\n",
       "  0.3654605490959248,\n",
       "  0.3362042831675158,\n",
       "  0.0452971790192847,\n",
       "  0.20484043097821458,\n",
       "  0.3131115575086719,\n",
       "  0.9107520001852244],\n",
       " [0.2402744071836309,\n",
       "  0.41424124792021966,\n",
       "  0.31304212422456673,\n",
       "  0.19441216221310242,\n",
       "  0.4151659124000399,\n",
       "  0.2706214500851076,\n",
       "  0.4159906616602883,\n",
       "  0.4358174218749186],\n",
       " [0.29020603645461995,\n",
       "  0.22863755977081557,\n",
       "  1.0647288027384272,\n",
       "  0.5470025148658906,\n",
       "  0.25911082750923986,\n",
       "  0.3065962204930077,\n",
       "  0.24464808754673106,\n",
       "  0.18650972822504694],\n",
       " [0.6967010008373509,\n",
       "  0.15915650623096333,\n",
       "  0.07796209320525438,\n",
       "  0.16626178783603734,\n",
       "  0.5189559065985002,\n",
       "  0.3210652884582264,\n",
       "  0.4642443635410395,\n",
       "  0.32633066484992634],\n",
       " [0.37093238843334414,\n",
       "  0.22551281237011145,\n",
       "  0.6886155527164366,\n",
       "  0.12419020391029663,\n",
       "  0.6433146776061811,\n",
       "  0.2222781384958409,\n",
       "  0.44681780056908976,\n",
       "  0.4679138931369349],\n",
       " [0.2505212255849994,\n",
       "  0.6103317655752556,\n",
       "  0.05734463001022425,\n",
       "  0.683378168279978,\n",
       "  0.13727799704245705,\n",
       "  0.1643262307015581,\n",
       "  0.3929378988763714,\n",
       "  0.4988990433252276],\n",
       " [0.5732173848968716,\n",
       "  0.6026130506472026,\n",
       "  0.5052648690638888,\n",
       "  0.234717005977064,\n",
       "  0.6027812734497201,\n",
       "  0.3796761750474624,\n",
       "  0.39856368938295383,\n",
       "  0.15393098375644382],\n",
       " [0.5308688152921553,\n",
       "  0.32356897061468337,\n",
       "  0.3151123554704925,\n",
       "  0.23118839090211793,\n",
       "  0.5211253567728883,\n",
       "  0.23977001917051358,\n",
       "  0.24457854071840746,\n",
       "  0.1900976429785819],\n",
       " [0.11407390873546502,\n",
       "  0.30223722717942236,\n",
       "  0.4345888414381402,\n",
       "  0.3462762971621072,\n",
       "  0.5254271665230047,\n",
       "  0.08383739530894596,\n",
       "  0.3798538239725206,\n",
       "  0.012637653356727086],\n",
       " [0.476992906756491,\n",
       "  0.04995623588477143,\n",
       "  0.3925915217585339,\n",
       "  0.5194783494991765,\n",
       "  0.40977362877847273,\n",
       "  0.2310868893734421,\n",
       "  0.7291236791835443,\n",
       "  0.4580902698840544],\n",
       " [0.7552773191146449,\n",
       "  0.266765199097898,\n",
       "  0.4380602055936253,\n",
       "  0.6491254209230899,\n",
       "  0.3570281529554281,\n",
       "  0.2549860085249902,\n",
       "  0.238473398445322,\n",
       "  0.3979171850382346],\n",
       " [0.38539645962617103,\n",
       "  0.48948528546472486,\n",
       "  0.21765613227017125,\n",
       "  0.470657734393781,\n",
       "  0.4484248794102638,\n",
       "  0.8547204253339014,\n",
       "  0.5666390772427577,\n",
       "  0.36633730858474856],\n",
       " [0.33325329215921906,\n",
       "  0.023498592132473208,\n",
       "  0.09606692846350989,\n",
       "  0.29092628285432326,\n",
       "  0.14398456061449846,\n",
       "  0.26474843005404514,\n",
       "  0.7476969962759621,\n",
       "  0.48792976035258073],\n",
       " [0.6921833639927251,\n",
       "  0.21721837196492536,\n",
       "  0.26836238586841615,\n",
       "  0.23008907520642727,\n",
       "  -0.007088973618684303,\n",
       "  0.478295363903614,\n",
       "  0.4838048487721153,\n",
       "  0.5812296611028269],\n",
       " [0.1439767159168764,\n",
       "  0.25130211746022074,\n",
       "  -0.07429964459425084,\n",
       "  0.22702414693356623,\n",
       "  0.28393703905394846,\n",
       "  0.4474714672774532,\n",
       "  0.2330212972721935,\n",
       "  0.36406883788519134],\n",
       " [0.27293586718062474,\n",
       "  0.3333711971354184,\n",
       "  0.2663770799015551,\n",
       "  0.2353035394607228,\n",
       "  0.8231417071495646,\n",
       "  0.27386039110133326,\n",
       "  0.15487549585073046,\n",
       "  0.13722727238992247],\n",
       " [0.4302861245283344,\n",
       "  0.22386021089383581,\n",
       "  0.2879829907412367,\n",
       "  0.17979183438118138,\n",
       "  0.14268586754724757,\n",
       "  0.2176828571090289,\n",
       "  0.22189363023464584,\n",
       "  0.32127275821628853],\n",
       " [0.18994981734622715,\n",
       "  0.3215846174786869,\n",
       "  0.18084312086866933,\n",
       "  0.408514501015504,\n",
       "  0.8597988712099354,\n",
       "  0.44456791468221746,\n",
       "  0.3499427442215498,\n",
       "  0.5454656199046424],\n",
       " [0.719649689782154,\n",
       "  0.4299164957872165,\n",
       "  0.3077456065436379,\n",
       "  0.677686271915648,\n",
       "  0.26856274397190555,\n",
       "  0.39180103047869896,\n",
       "  0.5196931178033868,\n",
       "  0.6372664826089695],\n",
       " [0.2935986332609848,\n",
       "  0.26560718033877134,\n",
       "  0.3795576427034563,\n",
       "  0.2288056135509848,\n",
       "  0.6561857499457683,\n",
       "  0.1793946068649409,\n",
       "  0.605486304056761,\n",
       "  0.447953354232985],\n",
       " [0.4415875386877117,\n",
       "  0.18130176314716634,\n",
       "  0.41888336426942296,\n",
       "  0.4877808450237135,\n",
       "  0.604992881440486,\n",
       "  0.2908168649810259,\n",
       "  0.6462717797003728,\n",
       "  0.42066130863999524],\n",
       " [0.563132413684917,\n",
       "  0.14193262377440713,\n",
       "  0.3177524680243249,\n",
       "  0.685583676626636,\n",
       "  -0.08965837927201058,\n",
       "  0.8309197588274083,\n",
       "  0.05560491984335667,\n",
       "  0.21142647940779816]]"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_local_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "6b5f8d4c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[-0.12273535877466202,\n",
       "  0.4293830990791321,\n",
       "  0.823665976524353,\n",
       "  1.4003627300262451,\n",
       "  1.3568060398101807,\n",
       "  1.4502134323120117,\n",
       "  0.8161609768867493,\n",
       "  1.3646652698516846],\n",
       " [0.19095304608345032,\n",
       "  1.085836410522461,\n",
       "  0.9662845134735107,\n",
       "  0.9494696855545044,\n",
       "  0.7412376999855042,\n",
       "  -0.06231619790196419,\n",
       "  0.18970836699008942,\n",
       "  1.2337946891784668],\n",
       " [0.08880627900362015,\n",
       "  -0.2805607318878174,\n",
       "  1.4175548553466797,\n",
       "  1.0951932668685913,\n",
       "  0.5676320195198059,\n",
       "  0.8583052158355713,\n",
       "  0.27465108036994934,\n",
       "  0.11610040068626404],\n",
       " [1.146021842956543,\n",
       "  0.08929156512022018,\n",
       "  0.23736584186553955,\n",
       "  1.3064849376678467,\n",
       "  0.5534606575965881,\n",
       "  0.8520834445953369,\n",
       "  0.6394424438476562,\n",
       "  1.0652333498001099],\n",
       " [0.5067155957221985,\n",
       "  0.9345248937606812,\n",
       "  0.9495549201965332,\n",
       "  0.8420770764350891,\n",
       "  1.279174566268921,\n",
       "  1.210945725440979,\n",
       "  1.0312414169311523,\n",
       "  0.08105404675006866],\n",
       " [0.0018882793374359608,\n",
       "  0.9296644926071167,\n",
       "  1.0364984273910522,\n",
       "  0.4903882145881653,\n",
       "  0.32097095251083374,\n",
       "  1.2088464498519897,\n",
       "  1.186440348625183,\n",
       "  0.1896434873342514],\n",
       " [0.7417868375778198,\n",
       "  1.3401920795440674,\n",
       "  1.3546912670135498,\n",
       "  0.4262484014034271,\n",
       "  0.023525984957814217,\n",
       "  -0.49147939682006836,\n",
       "  0.3576827347278595,\n",
       "  0.9867426753044128],\n",
       " [1.1499103307724,\n",
       "  1.0583412647247314,\n",
       "  0.036260828375816345,\n",
       "  0.5027261972427368,\n",
       "  0.2508220672607422,\n",
       "  0.4437981843948364,\n",
       "  1.3193049430847168,\n",
       "  1.2874784469604492],\n",
       " [-0.21574269235134125,\n",
       "  1.2587610483169556,\n",
       "  0.35888543725013733,\n",
       "  0.043636783957481384,\n",
       "  0.6596008539199829,\n",
       "  0.6751604676246643,\n",
       "  0.7701174020767212,\n",
       "  0.012348154559731483],\n",
       " [0.9688050150871277,\n",
       "  -0.5840412378311157,\n",
       "  0.603628396987915,\n",
       "  0.12677809596061707,\n",
       "  1.2660385370254517,\n",
       "  1.341729998588562,\n",
       "  0.3115597367286682,\n",
       "  1.274918556213379],\n",
       " [1.6444662809371948,\n",
       "  0.21814221143722534,\n",
       "  1.2568672895431519,\n",
       "  0.42314767837524414,\n",
       "  0.9070594310760498,\n",
       "  -0.2976374328136444,\n",
       "  0.4579984247684479,\n",
       "  1.0580129623413086],\n",
       " [0.47318726778030396,\n",
       "  0.3724735975265503,\n",
       "  0.814996063709259,\n",
       "  0.7860387563705444,\n",
       "  -0.380690336227417,\n",
       "  0.48686689138412476,\n",
       "  0.4760197401046753,\n",
       "  0.3814287781715393],\n",
       " [0.20718543231487274,\n",
       "  0.6377472281455994,\n",
       "  0.6881259083747864,\n",
       "  1.32895028591156,\n",
       "  0.8997262120246887,\n",
       "  1.022632122039795,\n",
       "  0.8845871090888977,\n",
       "  -0.11630374193191528],\n",
       " [0.31421974301338196,\n",
       "  -0.9391127228736877,\n",
       "  0.09360677003860474,\n",
       "  0.6385204195976257,\n",
       "  1.0772550106048584,\n",
       "  0.9448704719543457,\n",
       "  1.1468104124069214,\n",
       "  -0.09299150854349136],\n",
       " [0.6131280660629272,\n",
       "  0.18288639187812805,\n",
       "  -0.5135511159896851,\n",
       "  0.8015597462654114,\n",
       "  0.8262664079666138,\n",
       "  0.6185048818588257,\n",
       "  0.163734570145607,\n",
       "  0.08700539916753769],\n",
       " [0.0963938757777214,\n",
       "  1.155400037765503,\n",
       "  0.19505204260349274,\n",
       "  0.7739320993423462,\n",
       "  1.2842259407043457,\n",
       "  0.26025062799453735,\n",
       "  0.5138598680496216,\n",
       "  0.33968180418014526],\n",
       " [-0.044324763119220734,\n",
       "  0.5439729690551758,\n",
       "  1.201080322265625,\n",
       "  0.759075403213501,\n",
       "  -0.09308825433254242,\n",
       "  0.06321369111537933,\n",
       "  0.1273428201675415,\n",
       "  0.29194745421409607],\n",
       " [0.9288972020149231,\n",
       "  0.7744485139846802,\n",
       "  0.46374303102493286,\n",
       "  -0.14436770975589752,\n",
       "  0.5389517545700073,\n",
       "  0.3376571238040924,\n",
       "  0.011137460358440876,\n",
       "  1.0725018978118896],\n",
       " [1.8619627952575684,\n",
       "  0.8220572471618652,\n",
       "  0.7899228930473328,\n",
       "  0.131935715675354,\n",
       "  0.5495374798774719,\n",
       "  0.27542614936828613,\n",
       "  -0.20990456640720367,\n",
       "  0.2087576687335968],\n",
       " [1.0276451110839844,\n",
       "  1.6946643590927124,\n",
       "  -0.3888336420059204,\n",
       "  0.39555269479751587,\n",
       "  -0.1724851429462433,\n",
       "  1.8001785278320312,\n",
       "  1.2338197231292725,\n",
       "  0.27554452419281006],\n",
       " [-0.1710849106311798,\n",
       "  1.1599717140197754,\n",
       "  0.9712091088294983,\n",
       "  -0.502377450466156,\n",
       "  1.031782627105713,\n",
       "  0.457805871963501,\n",
       "  0.8003222942352295,\n",
       "  -0.010724145919084549],\n",
       " [0.47419267892837524,\n",
       "  0.8790967464447021,\n",
       "  0.011865630745887756,\n",
       "  0.3689364790916443,\n",
       "  1.1782057285308838,\n",
       "  0.09413128346204758,\n",
       "  -0.34821099042892456,\n",
       "  1.5008718967437744],\n",
       " [0.17016997933387756,\n",
       "  1.4675791263580322,\n",
       "  0.3571184575557709,\n",
       "  1.7289690971374512,\n",
       "  -0.16811668872833252,\n",
       "  0.3795774579048157,\n",
       "  1.246758222579956,\n",
       "  0.4002976417541504],\n",
       " [0.4303833246231079,\n",
       "  0.03993453085422516,\n",
       "  0.15448635816574097,\n",
       "  0.34564489126205444,\n",
       "  -0.1834205985069275,\n",
       "  1.5958338975906372,\n",
       "  0.3791021704673767,\n",
       "  0.2059432417154312],\n",
       " [-0.18028225004673004,\n",
       "  0.5707733035087585,\n",
       "  0.7852591872215271,\n",
       "  -0.29141512513160706,\n",
       "  0.829521894454956,\n",
       "  -0.0977068692445755,\n",
       "  0.8416354060173035,\n",
       "  1.382007360458374],\n",
       " [1.6502243280410767,\n",
       "  0.6876291036605835,\n",
       "  1.1310594081878662,\n",
       "  0.43456536531448364,\n",
       "  -0.7314342260360718,\n",
       "  0.9112824201583862,\n",
       "  0.643757164478302,\n",
       "  1.7870371341705322],\n",
       " [-0.009965799748897552,\n",
       "  0.5384424924850464,\n",
       "  0.3044642508029938,\n",
       "  -1.0002415180206299,\n",
       "  1.6627384424209595,\n",
       "  0.6525346636772156,\n",
       "  -0.22996525466442108,\n",
       "  0.09897933155298233],\n",
       " [0.621403694152832,\n",
       "  0.5967896580696106,\n",
       "  1.3986972570419312,\n",
       "  1.6381036043167114,\n",
       "  1.0785936117172241,\n",
       "  1.4753040075302124,\n",
       "  1.8028101921081543,\n",
       "  0.7461230754852295],\n",
       " [1.0099613666534424,\n",
       "  0.554298996925354,\n",
       "  1.0358808040618896,\n",
       "  -0.04851757735013962,\n",
       "  1.1820857524871826,\n",
       "  1.085620641708374,\n",
       "  0.4500579833984375,\n",
       "  0.6558305025100708],\n",
       " [0.7692205309867859,\n",
       "  0.9245419502258301,\n",
       "  0.19559352099895477,\n",
       "  1.5708156824111938,\n",
       "  0.9757193326950073,\n",
       "  -0.3849753439426422,\n",
       "  0.19606082141399384,\n",
       "  1.7395235300064087],\n",
       " [0.19273358583450317,\n",
       "  0.11489081382751465,\n",
       "  0.1779593676328659,\n",
       "  1.0263075828552246,\n",
       "  1.0018764734268188,\n",
       "  1.8777252435684204,\n",
       "  1.463614583015442,\n",
       "  -0.1974657028913498],\n",
       " [0.8736598491668701,\n",
       "  1.522114634513855,\n",
       "  0.5075078010559082,\n",
       "  0.7025623321533203,\n",
       "  0.25490957498550415,\n",
       "  0.3465438187122345,\n",
       "  -0.06286951899528503,\n",
       "  0.5118458271026611],\n",
       " [-0.6517409682273865,\n",
       "  0.39915913343429565,\n",
       "  -0.4678086042404175,\n",
       "  0.8883904814720154,\n",
       "  2.4035801887512207,\n",
       "  0.5390931367874146,\n",
       "  1.6625680923461914,\n",
       "  0.9975532293319702],\n",
       " [0.17691586911678314,\n",
       "  -0.3883551359176636,\n",
       "  2.153653144836426,\n",
       "  0.709580659866333,\n",
       "  1.1675981283187866,\n",
       "  1.767632007598877,\n",
       "  0.18795245885849,\n",
       "  -0.3351629674434662],\n",
       " [2.8169031143188477,\n",
       "  -0.5442997813224792,\n",
       "  0.10455396771430969,\n",
       "  -0.41172757744789124,\n",
       "  0.5558698177337646,\n",
       "  0.12082307040691376,\n",
       "  2.0866010189056396,\n",
       "  0.8465304374694824],\n",
       " [0.8605324029922485,\n",
       "  -0.20478951930999756,\n",
       "  0.5333120226860046,\n",
       "  -0.19895248115062714,\n",
       "  1.2670618295669556,\n",
       "  1.560657262802124,\n",
       "  0.8401447534561157,\n",
       "  2.2661631107330322],\n",
       " [1.2229810953140259,\n",
       "  1.1150610446929932,\n",
       "  0.9145590662956238,\n",
       "  -0.1727176159620285,\n",
       "  1.2289706468582153,\n",
       "  1.1284265518188477,\n",
       "  0.09285375475883484,\n",
       "  0.9030546545982361],\n",
       " [0.8467965126037598,\n",
       "  0.5670166015625,\n",
       "  0.4259852170944214,\n",
       "  0.178435280919075,\n",
       "  0.22044330835342407,\n",
       "  1.0877060890197754,\n",
       "  0.024525798857212067,\n",
       "  -0.03997735306620598],\n",
       " [1.335908055305481,\n",
       "  1.631274700164795,\n",
       "  1.1369702816009521,\n",
       "  1.5947016477584839,\n",
       "  0.49912789463996887,\n",
       "  -0.028591182082891464,\n",
       "  1.3367502689361572,\n",
       "  -0.5721298456192017],\n",
       " [1.727461814880371,\n",
       "  0.7466651797294617,\n",
       "  0.33674049377441406,\n",
       "  1.2122725248336792,\n",
       "  0.6484026908874512,\n",
       "  1.008802056312561,\n",
       "  0.625784158706665,\n",
       "  0.33521130681037903],\n",
       " [0.6457628607749939,\n",
       "  -0.8702088594436646,\n",
       "  1.286202311515808,\n",
       "  0.9541146755218506,\n",
       "  -0.42700281739234924,\n",
       "  0.8587853312492371,\n",
       "  0.4369887113571167,\n",
       "  1.1375935077667236],\n",
       " [0.6594102382659912,\n",
       "  0.4240157902240753,\n",
       "  1.1186976432800293,\n",
       "  -0.03125776723027229,\n",
       "  0.3807719647884369,\n",
       "  0.8530459403991699,\n",
       "  1.3497250080108643,\n",
       "  0.3045770525932312],\n",
       " [1.0847193002700806,\n",
       "  0.45566242933273315,\n",
       "  -0.015679514035582542,\n",
       "  1.620813012123108,\n",
       "  1.6149189472198486,\n",
       "  0.494651734828949,\n",
       "  0.6872051954269409,\n",
       "  0.6699979901313782],\n",
       " [1.466213583946228,\n",
       "  0.349964439868927,\n",
       "  0.9522100687026978,\n",
       "  0.3921002447605133,\n",
       "  0.8686020374298096,\n",
       "  0.48416244983673096,\n",
       "  0.7669258117675781,\n",
       "  0.5969817638397217],\n",
       " [-0.05682779476046562,\n",
       "  1.329907774925232,\n",
       "  1.740923523902893,\n",
       "  1.3203623294830322,\n",
       "  0.7283671498298645,\n",
       "  -0.43986573815345764,\n",
       "  1.568872094154358,\n",
       "  0.5712873935699463],\n",
       " [-0.4001061022281647,\n",
       "  1.2929438352584839,\n",
       "  0.9078203439712524,\n",
       "  2.008751392364502,\n",
       "  1.0284215211868286,\n",
       "  0.7485091686248779,\n",
       "  1.9181348085403442,\n",
       "  0.7085044980049133],\n",
       " [0.5234508514404297,\n",
       "  0.9648851156234741,\n",
       "  -0.014580228365957737,\n",
       "  -0.14129821956157684,\n",
       "  0.5580005645751953,\n",
       "  0.6662064790725708,\n",
       "  -0.3447763919830322,\n",
       "  0.5539442300796509],\n",
       " [0.5868099927902222,\n",
       "  1.1373411417007446,\n",
       "  0.9787418842315674,\n",
       "  0.9974232912063599,\n",
       "  0.8965637683868408,\n",
       "  0.6489600539207458,\n",
       "  -0.2312498241662979,\n",
       "  0.5547488927841187],\n",
       " [0.8967232704162598,\n",
       "  1.1268131732940674,\n",
       "  -0.2429850697517395,\n",
       "  0.11191015690565109,\n",
       "  0.33183661103248596,\n",
       "  1.5091993808746338,\n",
       "  -0.5315638780593872,\n",
       "  0.7812240123748779],\n",
       " [0.22916576266288757,\n",
       "  -0.1184636652469635,\n",
       "  0.7850866317749023,\n",
       "  0.6542977690696716,\n",
       "  0.7238544225692749,\n",
       "  -0.3422430455684662,\n",
       "  1.1523951292037964,\n",
       "  1.8055365085601807],\n",
       " [0.8869059085845947,\n",
       "  0.6778165102005005,\n",
       "  0.20076753199100494,\n",
       "  0.8699551820755005,\n",
       "  1.0303726196289062,\n",
       "  0.8645713329315186,\n",
       "  0.6738618016242981,\n",
       "  0.9608979225158691],\n",
       " [1.0404930114746094,\n",
       "  0.6775041222572327,\n",
       "  0.01664348691701889,\n",
       "  0.738246500492096,\n",
       "  -0.6736207604408264,\n",
       "  -0.1557050198316574,\n",
       "  1.106398105621338,\n",
       "  0.11725912243127823],\n",
       " [1.148938775062561,\n",
       "  0.5770850777626038,\n",
       "  1.9515067338943481,\n",
       "  0.03741566464304924,\n",
       "  0.418009877204895,\n",
       "  0.7584006190299988,\n",
       "  0.07960788160562515,\n",
       "  -0.72380530834198],\n",
       " [0.9678298830986023,\n",
       "  0.9638010263442993,\n",
       "  0.7628788948059082,\n",
       "  0.09845443814992905,\n",
       "  0.8171525001525879,\n",
       "  0.13284318149089813,\n",
       "  -0.0912579894065857,\n",
       "  0.2398718297481537],\n",
       " [0.8475292325019836,\n",
       "  1.0412299633026123,\n",
       "  0.7081903219223022,\n",
       "  -0.3048730492591858,\n",
       "  0.8129615187644958,\n",
       "  1.3567229509353638,\n",
       "  1.3846336603164673,\n",
       "  1.1298003196716309],\n",
       " [0.7563071250915527,\n",
       "  0.9692034721374512,\n",
       "  0.5080716013908386,\n",
       "  -0.4015829861164093,\n",
       "  0.8556910753250122,\n",
       "  1.1129586696624756,\n",
       "  1.162887692451477,\n",
       "  0.2965630292892456],\n",
       " [-0.23450809717178345,\n",
       "  0.1097637340426445,\n",
       "  1.3509008884429932,\n",
       "  0.8699640035629272,\n",
       "  1.3264583349227905,\n",
       "  0.8755963444709778,\n",
       "  0.42748528718948364,\n",
       "  0.26399633288383484],\n",
       " [-0.06503137201070786,\n",
       "  0.3414119482040405,\n",
       "  -0.3284122943878174,\n",
       "  0.588634729385376,\n",
       "  -0.08956240862607956,\n",
       "  1.2712421417236328,\n",
       "  0.7651824951171875,\n",
       "  -0.2779000699520111],\n",
       " [1.5660548210144043,\n",
       "  -0.1147783100605011,\n",
       "  0.611056923866272,\n",
       "  1.2002747058868408,\n",
       "  0.10782119631767273,\n",
       "  0.5114282369613647,\n",
       "  -0.03386865556240082,\n",
       "  1.610475778579712],\n",
       " [-0.38014188408851624,\n",
       "  0.3995837867259979,\n",
       "  -0.9168930649757385,\n",
       "  -0.245725616812706,\n",
       "  0.15266118943691254,\n",
       "  1.4193698167800903,\n",
       "  1.2011315822601318,\n",
       "  0.887327253818512],\n",
       " [0.8882392644882202,\n",
       "  -0.3124019503593445,\n",
       "  0.17429600656032562,\n",
       "  1.1203545331954956,\n",
       "  0.9864097833633423,\n",
       "  0.8096059560775757,\n",
       "  0.6562900543212891,\n",
       "  -0.056884199380874634],\n",
       " [1.0333786010742188,\n",
       "  1.5565909147262573,\n",
       "  0.7352113127708435,\n",
       "  0.5369771718978882,\n",
       "  1.5689046382904053,\n",
       "  -0.21768377721309662,\n",
       "  0.6518954634666443,\n",
       "  0.7909386157989502],\n",
       " [0.6819406151771545,\n",
       "  0.2596179246902466,\n",
       "  1.395141839981079,\n",
       "  0.44088810682296753,\n",
       "  0.5442743301391602,\n",
       "  0.17360247671604156,\n",
       "  1.2278478145599365,\n",
       "  0.9260964393615723],\n",
       " [0.6046690940856934,\n",
       "  1.8418974876403809,\n",
       "  1.033512830734253,\n",
       "  1.3375694751739502,\n",
       "  -0.7930704355239868,\n",
       "  1.3605326414108276,\n",
       "  0.30950579047203064,\n",
       "  1.6799215078353882],\n",
       " [1.0422734022140503,\n",
       "  0.23285263776779175,\n",
       "  0.44873595237731934,\n",
       "  -0.36531302332878113,\n",
       "  1.3732285499572754,\n",
       "  0.684841513633728,\n",
       "  0.2920619249343872,\n",
       "  0.13165713846683502],\n",
       " [0.20678088068962097,\n",
       "  0.21223777532577515,\n",
       "  1.0485395193099976,\n",
       "  0.2118672877550125,\n",
       "  1.5578877925872803,\n",
       "  1.033029556274414,\n",
       "  -0.3561978042125702,\n",
       "  1.048647403717041],\n",
       " [1.0688258409500122,\n",
       "  0.2797010838985443,\n",
       "  0.7113785147666931,\n",
       "  1.0736552476882935,\n",
       "  0.7215460538864136,\n",
       "  0.5270025730133057,\n",
       "  0.6452867388725281,\n",
       "  0.4923930764198303],\n",
       " [0.2631579339504242,\n",
       "  -0.1595335304737091,\n",
       "  2.018009662628174,\n",
       "  0.5045877695083618,\n",
       "  0.9584704637527466,\n",
       "  0.7815213203430176,\n",
       "  0.8296893239021301,\n",
       "  0.11111757159233093],\n",
       " [0.7410820126533508,\n",
       "  0.7739843130111694,\n",
       "  0.7596141695976257,\n",
       "  0.8363016247749329,\n",
       "  0.7554786801338196,\n",
       "  1.5400041341781616,\n",
       "  0.5441151857376099,\n",
       "  0.8477463126182556],\n",
       " [0.7982027530670166,\n",
       "  0.759619414806366,\n",
       "  0.3697192370891571,\n",
       "  1.2497334480285645,\n",
       "  0.15120016038417816,\n",
       "  -0.6102544069290161,\n",
       "  0.33998918533325195,\n",
       "  1.1474957466125488],\n",
       " [1.3214415311813354,\n",
       "  0.9719683527946472,\n",
       "  1.158975601196289,\n",
       "  1.5396783351898193,\n",
       "  1.1528315544128418,\n",
       "  1.0233190059661865,\n",
       "  0.33829182386398315,\n",
       "  0.14428164064884186],\n",
       " [0.6855496764183044,\n",
       "  0.8581211566925049,\n",
       "  0.9151592254638672,\n",
       "  1.5470203161239624,\n",
       "  0.5008871555328369,\n",
       "  1.2970519065856934,\n",
       "  0.21078725159168243,\n",
       "  0.07237814366817474],\n",
       " [-0.4095434546470642,\n",
       "  1.2031100988388062,\n",
       "  0.6579062938690186,\n",
       "  0.5500364303588867,\n",
       "  1.3368297815322876,\n",
       "  0.5007888674736023,\n",
       "  0.1778034120798111,\n",
       "  0.4480767250061035],\n",
       " [0.8784123063087463,\n",
       "  0.19292566180229187,\n",
       "  0.37475651502609253,\n",
       "  0.9341398477554321,\n",
       "  0.18644528090953827,\n",
       "  1.6090587377548218,\n",
       "  0.552269458770752,\n",
       "  0.7758820652961731],\n",
       " [0.23938561975955963,\n",
       "  1.1857229471206665,\n",
       "  0.352092444896698,\n",
       "  0.9630482196807861,\n",
       "  0.8108285069465637,\n",
       "  0.8178543448448181,\n",
       "  0.18648366630077362,\n",
       "  0.5783163905143738],\n",
       " [0.5373773574829102,\n",
       "  0.7675755023956299,\n",
       "  1.77919602394104,\n",
       "  0.6773523688316345,\n",
       "  0.5908665060997009,\n",
       "  0.7050622701644897,\n",
       "  1.8510714769363403,\n",
       "  0.6173234581947327],\n",
       " [0.8384829759597778,\n",
       "  0.8069192171096802,\n",
       "  0.602565348148346,\n",
       "  0.7172342538833618,\n",
       "  0.729820191860199,\n",
       "  1.1484909057617188,\n",
       "  1.2924789190292358,\n",
       "  1.4315375089645386],\n",
       " [1.1782653331756592,\n",
       "  0.8054063320159912,\n",
       "  0.31589001417160034,\n",
       "  1.7839341163635254,\n",
       "  -0.7172083854675293,\n",
       "  0.7137279510498047,\n",
       "  0.2682870328426361,\n",
       "  0.2803722620010376],\n",
       " [0.7248608469963074,\n",
       "  -0.26130107045173645,\n",
       "  2.0461912155151367,\n",
       "  0.7224082350730896,\n",
       "  0.7410879135131836,\n",
       "  0.14208215475082397,\n",
       "  0.8777621388435364,\n",
       "  -0.9385627508163452],\n",
       " [-0.30632659792900085,\n",
       "  0.9808575510978699,\n",
       "  0.7295591831207275,\n",
       "  0.2088092416524887,\n",
       "  0.019872846081852913,\n",
       "  1.2050191164016724,\n",
       "  -0.3474351167678833,\n",
       "  1.17479407787323],\n",
       " [0.20904719829559326,\n",
       "  0.45505788922309875,\n",
       "  0.28210440278053284,\n",
       "  -0.009509935975074768,\n",
       "  -1.037472128868103,\n",
       "  1.1277707815170288,\n",
       "  1.220852255821228,\n",
       "  0.15451034903526306],\n",
       " [0.5883464217185974,\n",
       "  0.29319003224372864,\n",
       "  -0.060440342873334885,\n",
       "  -0.055946122854948044,\n",
       "  0.09724418073892593,\n",
       "  0.9549344778060913,\n",
       "  0.43950051069259644,\n",
       "  1.161268711090088],\n",
       " [1.3197003602981567,\n",
       "  1.2076330184936523,\n",
       "  -0.003703589551150799,\n",
       "  0.8354045152664185,\n",
       "  -0.425660640001297,\n",
       "  1.4657313823699951,\n",
       "  0.47799360752105713,\n",
       "  1.3319497108459473],\n",
       " [0.5192162990570068,\n",
       "  0.8687041997909546,\n",
       "  0.008965592831373215,\n",
       "  0.4918502867221832,\n",
       "  0.36339232325553894,\n",
       "  0.2682635188102722,\n",
       "  -0.20460271835327148,\n",
       "  0.35946759581565857],\n",
       " [0.2912704348564148,\n",
       "  0.12361277639865875,\n",
       "  0.17179493606090546,\n",
       "  0.6149757504463196,\n",
       "  0.6315135359764099,\n",
       "  0.4211508333683014,\n",
       "  1.3767644166946411,\n",
       "  1.653198480606079],\n",
       " [-0.05475441366434097,\n",
       "  0.6019850969314575,\n",
       "  1.157862663269043,\n",
       "  0.5417839288711548,\n",
       "  1.4341111183166504,\n",
       "  0.5891523361206055,\n",
       "  0.8009392619132996,\n",
       "  0.058924514800310135],\n",
       " [1.4671003818511963,\n",
       "  0.7696497440338135,\n",
       "  0.04928953945636749,\n",
       "  -0.18722759187221527,\n",
       "  0.6454852819442749,\n",
       "  0.6064060926437378,\n",
       "  -0.3688652217388153,\n",
       "  1.315749168395996],\n",
       " [0.42179441452026367,\n",
       "  0.6917396187782288,\n",
       "  0.5397831797599792,\n",
       "  1.955932855606079,\n",
       "  0.29281508922576904,\n",
       "  0.9329748153686523,\n",
       "  -0.5908997058868408,\n",
       "  -0.5207396149635315],\n",
       " [0.750171422958374,\n",
       "  0.48494982719421387,\n",
       "  0.2705206573009491,\n",
       "  0.8532837629318237,\n",
       "  0.7773425579071045,\n",
       "  1.7942534685134888,\n",
       "  1.4432263374328613,\n",
       "  0.5014457702636719],\n",
       " [0.3951336145401001,\n",
       "  0.9864742159843445,\n",
       "  0.8481379747390747,\n",
       "  1.2072962522506714,\n",
       "  -0.27603867650032043,\n",
       "  -0.05330312252044678,\n",
       "  0.19482672214508057,\n",
       "  1.1962283849716187],\n",
       " [0.04070121422410011,\n",
       "  1.8823387622833252,\n",
       "  0.30890876054763794,\n",
       "  0.40606430172920227,\n",
       "  1.9467653036117554,\n",
       "  1.2084542512893677,\n",
       "  0.004305973183363676,\n",
       "  -0.0805482342839241],\n",
       " [0.4816119074821472,\n",
       "  0.8144100308418274,\n",
       "  0.8388530015945435,\n",
       "  0.26400429010391235,\n",
       "  1.061220407485962,\n",
       "  0.4531838297843933,\n",
       "  -0.4124085307121277,\n",
       "  0.6626724004745483],\n",
       " [1.070812463760376,\n",
       "  -0.1101609468460083,\n",
       "  0.6500921249389648,\n",
       "  0.513896644115448,\n",
       "  1.3457449674606323,\n",
       "  0.9877849817276001,\n",
       "  -0.6804384589195251,\n",
       "  0.3385601043701172],\n",
       " [0.6764967441558838,\n",
       "  0.10054601728916168,\n",
       "  0.09597928076982498,\n",
       "  0.7694987654685974,\n",
       "  0.2923740744590759,\n",
       "  1.0086615085601807,\n",
       "  0.07823382318019867,\n",
       "  0.04339151456952095],\n",
       " [0.8498806357383728,\n",
       "  1.1535964012145996,\n",
       "  0.9463326930999756,\n",
       "  0.47997772693634033,\n",
       "  1.004679799079895,\n",
       "  0.9928759336471558,\n",
       "  0.5152087211608887,\n",
       "  0.9316871166229248],\n",
       " [-0.05526696890592575,\n",
       "  1.2278984785079956,\n",
       "  0.4096958637237549,\n",
       "  -0.44681990146636963,\n",
       "  0.2940526604652405,\n",
       "  0.7505502700805664,\n",
       "  0.8732110857963562,\n",
       "  2.1323494911193848],\n",
       " [0.7354442477226257,\n",
       "  1.1341750621795654,\n",
       "  1.1794847249984741,\n",
       "  0.7649127244949341,\n",
       "  1.785014271736145,\n",
       "  0.6644093990325928,\n",
       "  0.9591809511184692,\n",
       "  0.606896698474884],\n",
       " [0.7801209092140198,\n",
       "  1.1253390312194824,\n",
       "  1.2356977462768555,\n",
       "  -0.22753582894802094,\n",
       "  0.4800240695476532,\n",
       "  1.3247133493423462,\n",
       "  0.48555701971054077,\n",
       "  -0.16997872292995453],\n",
       " [0.6628124117851257,\n",
       "  1.1852589845657349,\n",
       "  2.192808151245117,\n",
       "  0.23406760394573212,\n",
       "  0.1030799075961113,\n",
       "  0.582740068435669,\n",
       "  -0.6167315244674683,\n",
       "  -0.04028385877609253],\n",
       " [-0.010954015888273716,\n",
       "  1.038774847984314,\n",
       "  0.8577556610107422,\n",
       "  1.1882089376449585,\n",
       "  0.650393009185791,\n",
       "  -0.9517457485198975,\n",
       "  0.9435125589370728,\n",
       "  0.6280628442764282],\n",
       " [1.6762473583221436,\n",
       "  0.3327704966068268,\n",
       "  0.2545875608921051,\n",
       "  1.5098705291748047,\n",
       "  1.0780363082885742,\n",
       "  0.9733999967575073,\n",
       "  -0.48176679015159607,\n",
       "  1.6776777505874634],\n",
       " [1.5276129245758057,\n",
       "  0.24758389592170715,\n",
       "  0.45884865522384644,\n",
       "  1.42098069190979,\n",
       "  0.06954687833786011,\n",
       "  0.38857394456863403,\n",
       "  0.7807379961013794,\n",
       "  0.33969971537590027],\n",
       " [0.8601626753807068,\n",
       "  0.09213321655988693,\n",
       "  0.8481763601303101,\n",
       "  0.2543766498565674,\n",
       "  -0.10375819355249405,\n",
       "  0.6901337504386902,\n",
       "  -0.3980754315853119,\n",
       "  1.0302845239639282],\n",
       " [1.8237831592559814,\n",
       "  1.2362412214279175,\n",
       "  0.8986225128173828,\n",
       "  0.6020984053611755,\n",
       "  0.8156884908676147,\n",
       "  0.9590381383895874,\n",
       "  0.30237531661987305,\n",
       "  -0.2997187674045563],\n",
       " [0.9146020412445068,\n",
       "  0.508873701095581,\n",
       "  0.28016042709350586,\n",
       "  0.3422899544239044,\n",
       "  -0.40016594529151917,\n",
       "  0.35678502917289734,\n",
       "  1.8455196619033813,\n",
       "  0.8587247133255005],\n",
       " [-0.47478437423706055,\n",
       "  0.9118719696998596,\n",
       "  1.2835075855255127,\n",
       "  0.8211827874183655,\n",
       "  0.38739511370658875,\n",
       "  0.7812017202377319,\n",
       "  0.25025805830955505,\n",
       "  1.302846908569336],\n",
       " [1.4159207344055176,\n",
       "  0.16711345314979553,\n",
       "  1.434517741203308,\n",
       "  1.3214057683944702,\n",
       "  1.3226855993270874,\n",
       "  1.1861059665679932,\n",
       "  -0.20304208993911743,\n",
       "  1.5786973237991333],\n",
       " [0.3654381334781647,\n",
       "  0.5081825256347656,\n",
       "  0.014794752933084965,\n",
       "  1.148366093635559,\n",
       "  -0.4811488389968872,\n",
       "  -0.25468310713768005,\n",
       "  1.1656373739242554,\n",
       "  0.11195844411849976],\n",
       " [1.4085272550582886,\n",
       "  -0.4037753641605377,\n",
       "  0.9422607421875,\n",
       "  0.13988910615444183,\n",
       "  0.050116777420043945,\n",
       "  0.1134689599275589,\n",
       "  0.4285908639431,\n",
       "  0.5250247716903687],\n",
       " [0.44087299704551697,\n",
       "  0.36162152886390686,\n",
       "  1.8734948635101318,\n",
       "  0.5431849956512451,\n",
       "  0.7395663261413574,\n",
       "  1.2482692003250122,\n",
       "  1.1528228521347046,\n",
       "  2.428717851638794],\n",
       " [0.33401569724082947,\n",
       "  0.7288097739219666,\n",
       "  1.1391453742980957,\n",
       "  1.2940435409545898,\n",
       "  0.22970148921012878,\n",
       "  0.19682931900024414,\n",
       "  0.47764045000076294,\n",
       "  -0.5661598443984985],\n",
       " [0.295767217874527,\n",
       "  0.26648056507110596,\n",
       "  0.1995035856962204,\n",
       "  0.049438633024692535,\n",
       "  1.508899450302124,\n",
       "  1.3668992519378662,\n",
       "  -0.4998842477798462,\n",
       "  1.1007957458496094],\n",
       " [1.0798485279083252,\n",
       "  1.6273581981658936,\n",
       "  0.42707160115242004,\n",
       "  0.6443476676940918,\n",
       "  0.8067579865455627,\n",
       "  2.0519936084747314,\n",
       "  -0.2740725576877594,\n",
       "  0.5505490303039551],\n",
       " [0.7769162654876709,\n",
       "  -0.00224693538621068,\n",
       "  0.03957480192184448,\n",
       "  0.906862735748291,\n",
       "  0.3893915116786957,\n",
       "  0.8970329165458679,\n",
       "  1.2255879640579224,\n",
       "  0.08437921851873398],\n",
       " [-0.14571775496006012,\n",
       "  0.966600775718689,\n",
       "  0.4345952570438385,\n",
       "  0.8231303691864014,\n",
       "  0.5031105875968933,\n",
       "  0.21177180111408234,\n",
       "  1.299818515777588,\n",
       "  0.8648117780685425],\n",
       " [0.7784526348114014,\n",
       "  0.27712348103523254,\n",
       "  0.885769784450531,\n",
       "  1.1886438131332397,\n",
       "  2.076601982116699,\n",
       "  0.8939675688743591,\n",
       "  0.4607243835926056,\n",
       "  1.9945210218429565],\n",
       " [0.5082095861434937,\n",
       "  1.2103382349014282,\n",
       "  -0.6380113363265991,\n",
       "  0.7936013340950012,\n",
       "  2.037707805633545,\n",
       "  0.3043364882469177,\n",
       "  1.1879013776779175,\n",
       "  1.046068549156189],\n",
       " [1.0691053867340088,\n",
       "  0.5895365476608276,\n",
       "  1.2797671556472778,\n",
       "  -0.09345820546150208,\n",
       "  0.21474705636501312,\n",
       "  0.5214143991470337,\n",
       "  0.336204469203949,\n",
       "  0.8931235671043396],\n",
       " [0.680213212966919,\n",
       "  0.1768905073404312,\n",
       "  0.6707269549369812,\n",
       "  0.4638071358203888,\n",
       "  0.039748724550008774,\n",
       "  0.39233896136283875,\n",
       "  1.110952615737915,\n",
       "  2.279693603515625],\n",
       " [1.09459388256073,\n",
       "  -0.1857849806547165,\n",
       "  1.0100557804107666,\n",
       "  -0.1944914609193802,\n",
       "  0.10527966171503067,\n",
       "  -0.2062772661447525,\n",
       "  1.8221585750579834,\n",
       "  1.0229823589324951],\n",
       " [0.7802533507347107,\n",
       "  0.7085791826248169,\n",
       "  -0.27328455448150635,\n",
       "  1.2189863920211792,\n",
       "  1.36111319065094,\n",
       "  0.20935791730880737,\n",
       "  0.16338394582271576,\n",
       "  1.0709962844848633],\n",
       " [-0.40292611718177795,\n",
       "  0.7480397820472717,\n",
       "  -0.45666036009788513,\n",
       "  0.7400232553482056,\n",
       "  0.19062019884586334,\n",
       "  0.1543777734041214,\n",
       "  1.5362282991409302,\n",
       "  -0.6224579215049744],\n",
       " [0.3204180896282196,\n",
       "  0.5564504265785217,\n",
       "  0.755052387714386,\n",
       "  -0.5127205848693848,\n",
       "  0.5845710635185242,\n",
       "  0.429622620344162,\n",
       "  1.6940213441848755,\n",
       "  0.4731834828853607],\n",
       " [0.342244416475296,\n",
       "  1.0649559497833252,\n",
       "  1.361362099647522,\n",
       "  0.26266101002693176,\n",
       "  0.7062035799026489,\n",
       "  1.9544378519058228,\n",
       "  0.44494250416755676,\n",
       "  1.070021629333496],\n",
       " [1.1845399141311646,\n",
       "  1.1421592235565186,\n",
       "  1.2326806783676147,\n",
       "  2.094886064529419,\n",
       "  1.2099201679229736,\n",
       "  1.5248699188232422,\n",
       "  0.6916237473487854,\n",
       "  1.8732826709747314],\n",
       " [0.38201871514320374,\n",
       "  0.3010265827178955,\n",
       "  0.2219436764717102,\n",
       "  0.9328641891479492,\n",
       "  1.0962135791778564,\n",
       "  -0.3582609295845032,\n",
       "  -0.31855231523513794,\n",
       "  -0.16691841185092926],\n",
       " [1.5940206050872803,\n",
       "  1.3722281455993652,\n",
       "  0.9695937633514404,\n",
       "  0.05265501141548157,\n",
       "  0.7468364834785461,\n",
       "  0.9713484048843384,\n",
       "  -0.47122853994369507,\n",
       "  1.077422022819519],\n",
       " [0.6325852870941162,\n",
       "  1.7636756896972656,\n",
       "  0.18441727757453918,\n",
       "  -0.11720021814107895,\n",
       "  -0.0647180825471878,\n",
       "  1.2013732194900513,\n",
       "  1.0529659986495972,\n",
       "  0.9363645315170288],\n",
       " [1.649731159210205,\n",
       "  1.1633039712905884,\n",
       "  1.136684536933899,\n",
       "  -0.8585618734359741,\n",
       "  0.46549010276794434,\n",
       "  0.7601560354232788,\n",
       "  0.33394503593444824,\n",
       "  0.529916524887085],\n",
       " [0.6806090474128723,\n",
       "  0.5332726836204529,\n",
       "  0.5788615942001343,\n",
       "  1.0026912689208984,\n",
       "  0.17414388060569763,\n",
       "  1.128904104232788,\n",
       "  0.46309685707092285,\n",
       "  0.6091936230659485],\n",
       " [0.9383937120437622,\n",
       "  0.35065189003944397,\n",
       "  1.2133915424346924,\n",
       "  0.09652777761220932,\n",
       "  0.23130734264850616,\n",
       "  0.8313935995101929,\n",
       "  1.2097878456115723,\n",
       "  1.3450511693954468],\n",
       " [1.2856299877166748,\n",
       "  1.1726293563842773,\n",
       "  0.6220411062240601,\n",
       "  1.1201838254928589,\n",
       "  0.26893267035484314,\n",
       "  1.825939416885376,\n",
       "  1.1493149995803833,\n",
       "  0.5522738695144653],\n",
       " [1.420372486114502,\n",
       "  1.929799199104309,\n",
       "  0.32010161876678467,\n",
       "  0.6061977744102478,\n",
       "  0.9504846334457397,\n",
       "  -0.5548103451728821,\n",
       "  -0.555100679397583,\n",
       "  0.8679473400115967],\n",
       " [1.2216781377792358,\n",
       "  0.017833834514021873,\n",
       "  -0.19076445698738098,\n",
       "  0.3530937135219574,\n",
       "  1.502084732055664,\n",
       "  0.7612553238868713,\n",
       "  0.5569666028022766,\n",
       "  0.2533722221851349],\n",
       " [0.6908956170082092,\n",
       "  0.7026292681694031,\n",
       "  0.6863276362419128,\n",
       "  0.1765701323747635,\n",
       "  0.11752423644065857,\n",
       "  0.38480836153030396,\n",
       "  0.5867794156074524,\n",
       "  -0.08491615951061249],\n",
       " [0.325395405292511,\n",
       "  1.1613688468933105,\n",
       "  1.7923598289489746,\n",
       "  0.4058593809604645,\n",
       "  1.0975927114486694,\n",
       "  1.2030913829803467,\n",
       "  1.2477843761444092,\n",
       "  0.6254990696907043],\n",
       " [1.6502033472061157,\n",
       "  0.6255520582199097,\n",
       "  0.8132091164588928,\n",
       "  0.7103356719017029,\n",
       "  0.23574237525463104,\n",
       "  0.7412241697311401,\n",
       "  0.8604669570922852,\n",
       "  0.16620051860809326],\n",
       " [0.8607344627380371,\n",
       "  0.5126792192459106,\n",
       "  0.8949565887451172,\n",
       "  -0.8624919056892395,\n",
       "  0.6184847354888916,\n",
       "  0.12827281653881073,\n",
       "  0.5515650510787964,\n",
       "  0.7814645767211914],\n",
       " [1.2509520053863525,\n",
       "  1.8265074491500854,\n",
       "  0.913383424282074,\n",
       "  0.320530503988266,\n",
       "  0.04232092574238777,\n",
       "  0.6731711030006409,\n",
       "  1.3651885986328125,\n",
       "  0.062046606093645096],\n",
       " [0.08859376609325409,\n",
       "  1.5717859268188477,\n",
       "  0.5105108618736267,\n",
       "  0.9112561941146851,\n",
       "  0.013352496549487114,\n",
       "  -0.15414656698703766,\n",
       "  0.165570467710495,\n",
       "  1.1486862897872925],\n",
       " [0.002404895145446062,\n",
       "  -1.020896315574646,\n",
       "  1.1896462440490723,\n",
       "  1.174106240272522,\n",
       "  1.0812612771987915,\n",
       "  -0.43598777055740356,\n",
       "  0.359160840511322,\n",
       "  0.43582600355148315],\n",
       " [1.2849513292312622,\n",
       "  0.9163972735404968,\n",
       "  0.6152085065841675,\n",
       "  -0.3441660404205322,\n",
       "  1.0357708930969238,\n",
       "  0.3024398684501648,\n",
       "  0.8666316866874695,\n",
       "  1.192746639251709],\n",
       " [0.7207927107810974,\n",
       "  0.38642406463623047,\n",
       "  0.6963471174240112,\n",
       "  0.8221131563186646,\n",
       "  -0.5168943405151367,\n",
       "  -0.015445951372385025,\n",
       "  1.347639560699463,\n",
       "  1.7618341445922852],\n",
       " [2.1668777465820312,\n",
       "  -0.24251900613307953,\n",
       "  0.29317954182624817,\n",
       "  -0.25726935267448425,\n",
       "  0.673160195350647,\n",
       "  1.4945576190948486,\n",
       "  1.853124737739563,\n",
       "  1.1555931568145752],\n",
       " [1.703830361366272,\n",
       "  0.9154699444770813,\n",
       "  1.2690119743347168,\n",
       "  0.2828417122364044,\n",
       "  1.1332813501358032,\n",
       "  0.932121753692627,\n",
       "  1.6745953559875488,\n",
       "  0.11623423546552658],\n",
       " [0.7360759377479553,\n",
       "  0.6206526756286621,\n",
       "  -0.3077438175678253,\n",
       "  1.6071828603744507,\n",
       "  2.2032573223114014,\n",
       "  0.5375311374664307,\n",
       "  0.7111477851867676,\n",
       "  0.6018490195274353],\n",
       " [1.0654525756835938,\n",
       "  0.7688785791397095,\n",
       "  0.5586860775947571,\n",
       "  0.9650017023086548,\n",
       "  0.9072736501693726,\n",
       "  0.9018439054489136,\n",
       "  -0.22253873944282532,\n",
       "  0.36667880415916443],\n",
       " [1.2405452728271484,\n",
       "  1.0191833972930908,\n",
       "  -0.005214673932641745,\n",
       "  1.040305733680725,\n",
       "  0.5613710880279541,\n",
       "  0.5602118968963623,\n",
       "  0.7575920820236206,\n",
       "  -0.35897281765937805],\n",
       " [0.5569193363189697,\n",
       "  1.3551079034805298,\n",
       "  0.15984205901622772,\n",
       "  0.2479133903980255,\n",
       "  0.7340414524078369,\n",
       "  1.3456863164901733,\n",
       "  1.3751288652420044,\n",
       "  0.7270274758338928],\n",
       " [0.6787663102149963,\n",
       "  0.9594399929046631,\n",
       "  0.5937714576721191,\n",
       "  0.1389978677034378,\n",
       "  0.3654891848564148,\n",
       "  0.3847017288208008,\n",
       "  1.3676127195358276,\n",
       "  0.8723689913749695],\n",
       " [1.132835865020752,\n",
       "  -0.9039286971092224,\n",
       "  0.5807647705078125,\n",
       "  1.483508586883545,\n",
       "  1.7751233577728271,\n",
       "  -0.04039357602596283,\n",
       "  0.12359971553087234,\n",
       "  0.5862955451011658],\n",
       " [1.40657639503479,\n",
       "  0.588105320930481,\n",
       "  0.5496704578399658,\n",
       "  2.0465126037597656,\n",
       "  0.35773658752441406,\n",
       "  0.7202553749084473,\n",
       "  0.509002685546875,\n",
       "  0.7192836403846741],\n",
       " [0.3304469883441925,\n",
       "  0.6458250880241394,\n",
       "  0.9772236347198486,\n",
       "  1.4723252058029175,\n",
       "  -0.03490195795893669,\n",
       "  1.0204366445541382,\n",
       "  0.920631468296051,\n",
       "  0.6667734980583191],\n",
       " [0.00522497808560729,\n",
       "  1.0592433214187622,\n",
       "  0.2269735038280487,\n",
       "  0.1800757646560669,\n",
       "  1.177268385887146,\n",
       "  0.8310421705245972,\n",
       "  -0.0851120725274086,\n",
       "  0.5035482048988342],\n",
       " [0.2975998520851135,\n",
       "  0.8064444661140442,\n",
       "  -0.0661138966679573,\n",
       "  -0.8295859694480896,\n",
       "  0.7061238884925842,\n",
       "  0.8663055300712585,\n",
       "  0.5299340486526489,\n",
       "  0.4862222969532013],\n",
       " [0.7263649702072144,\n",
       "  0.09446768462657928,\n",
       "  0.8933952450752258,\n",
       "  -0.4497760534286499,\n",
       "  0.8058443069458008,\n",
       "  0.2425367385149002,\n",
       "  1.396353840827942,\n",
       "  0.563052237033844],\n",
       " [0.3839539885520935,\n",
       "  0.7282782196998596,\n",
       "  1.212318778038025,\n",
       "  0.4261894226074219,\n",
       "  0.07833203673362732,\n",
       "  1.0120736360549927,\n",
       "  -0.18252773582935333,\n",
       "  1.3580633401870728],\n",
       " [0.6570674180984497,\n",
       "  0.6796287298202515,\n",
       "  -0.49393969774246216,\n",
       "  -0.2153170108795166,\n",
       "  0.3603777289390564,\n",
       "  0.137309730052948,\n",
       "  1.3760960102081299,\n",
       "  0.9498162269592285],\n",
       " [1.0676186084747314,\n",
       "  2.209193229675293,\n",
       "  0.7915538549423218,\n",
       "  0.02912929654121399,\n",
       "  0.2635575234889984,\n",
       "  1.1975409984588623,\n",
       "  0.15380337834358215,\n",
       "  -0.27338406443595886],\n",
       " [0.6681488752365112,\n",
       "  0.424796998500824,\n",
       "  -0.021005665883421898,\n",
       "  0.9207539558410645,\n",
       "  0.8836610913276672,\n",
       "  0.45215198397636414,\n",
       "  0.9294887781143188,\n",
       "  0.409376323223114],\n",
       " [0.4538514316082001,\n",
       "  0.00935031846165657,\n",
       "  -0.2988351881504059,\n",
       "  -0.3043774962425232,\n",
       "  0.36880621314048767,\n",
       "  0.8153471946716309,\n",
       "  1.0106143951416016,\n",
       "  1.275555968284607],\n",
       " [0.726807713508606,\n",
       "  1.5195404291152954,\n",
       "  0.921931266784668,\n",
       "  2.021548271179199,\n",
       "  1.0035730600357056,\n",
       "  0.48184406757354736,\n",
       "  0.5714308619499207,\n",
       "  2.3149890899658203],\n",
       " [0.10742609202861786,\n",
       "  0.8301975131034851,\n",
       "  0.055366963148117065,\n",
       "  0.671789824962616,\n",
       "  0.6173340082168579,\n",
       "  0.3647671639919281,\n",
       "  1.0987956523895264,\n",
       "  -0.11046690493822098],\n",
       " [1.8518093824386597,\n",
       "  1.4940919876098633,\n",
       "  0.46032124757766724,\n",
       "  0.6791858077049255,\n",
       "  0.7957040071487427,\n",
       "  -0.1715833693742752,\n",
       "  1.2952808141708374,\n",
       "  -0.22201302647590637],\n",
       " [1.4590705633163452,\n",
       "  -0.024394001811742783,\n",
       "  0.8681504130363464,\n",
       "  1.3634161949157715,\n",
       "  1.2020343542099,\n",
       "  0.8399321436882019,\n",
       "  0.7074353098869324,\n",
       "  1.1966806650161743],\n",
       " [1.206592082977295,\n",
       "  1.5290743112564087,\n",
       "  1.4625062942504883,\n",
       "  1.3183199167251587,\n",
       "  0.8079599738121033,\n",
       "  1.2530597448349,\n",
       "  0.2964257597923279,\n",
       "  1.7012945413589478],\n",
       " [0.23744837939739227,\n",
       "  -0.40892449021339417,\n",
       "  0.45564913749694824,\n",
       "  0.9539667367935181,\n",
       "  0.45330315828323364,\n",
       "  0.6427394151687622,\n",
       "  2.1651062965393066,\n",
       "  0.45300400257110596],\n",
       " [1.1088565587997437,\n",
       "  1.0284892320632935,\n",
       "  0.8455905914306641,\n",
       "  0.44362688064575195,\n",
       "  0.20423926413059235,\n",
       "  1.4118196964263916,\n",
       "  0.6883041858673096,\n",
       "  1.060295820236206],\n",
       " [0.11269007623195648,\n",
       "  -0.4214974343776703,\n",
       "  1.3352781534194946,\n",
       "  1.374683141708374,\n",
       "  0.24926240742206573,\n",
       "  1.2196691036224365,\n",
       "  0.30621537566185,\n",
       "  1.5557289123535156],\n",
       " [0.5174117684364319,\n",
       "  1.3464765548706055,\n",
       "  0.6041826605796814,\n",
       "  0.8579862117767334,\n",
       "  1.1107572317123413,\n",
       "  0.06005809083580971,\n",
       "  0.1940934956073761,\n",
       "  0.7053118944168091],\n",
       " [-0.13123492896556854,\n",
       "  -0.016795404255390167,\n",
       "  0.06256788969039917,\n",
       "  0.6004949808120728,\n",
       "  1.5595862865447998,\n",
       "  0.2785467505455017,\n",
       "  0.16029894351959229,\n",
       "  0.1422906219959259],\n",
       " [1.4414560794830322,\n",
       "  1.0570769309997559,\n",
       "  0.868442177772522,\n",
       "  0.4539620280265808,\n",
       "  0.08614809066057205,\n",
       "  1.372676968574524,\n",
       "  1.2798688411712646,\n",
       "  1.1097421646118164],\n",
       " [0.2684161067008972,\n",
       "  0.439188152551651,\n",
       "  0.7649790644645691,\n",
       "  1.31251859664917,\n",
       "  1.30439293384552,\n",
       "  0.6975262761116028,\n",
       "  0.9592947363853455,\n",
       "  0.958408772945404],\n",
       " [0.3814082741737366,\n",
       "  0.453965961933136,\n",
       "  0.7196049690246582,\n",
       "  0.42421868443489075,\n",
       "  0.7008553743362427,\n",
       "  0.7288287878036499,\n",
       "  0.6543476581573486,\n",
       "  0.952244758605957],\n",
       " [0.3671402931213379,\n",
       "  0.5315374135971069,\n",
       "  0.4745793640613556,\n",
       "  0.7873765230178833,\n",
       "  0.709794282913208,\n",
       "  0.21623584628105164,\n",
       "  1.0196788311004639,\n",
       "  1.0949370861053467],\n",
       " [0.7354912757873535,\n",
       "  0.6874397397041321,\n",
       "  0.1763080209493637,\n",
       "  0.4025425910949707,\n",
       "  1.027233362197876,\n",
       "  0.39462319016456604,\n",
       "  0.07225216925144196,\n",
       "  0.1704787313938141],\n",
       " [0.6506420373916626,\n",
       "  -0.04041002318263054,\n",
       "  0.1438077688217163,\n",
       "  1.0109241008758545,\n",
       "  0.6514233350753784,\n",
       "  -0.8521170020103455,\n",
       "  1.3378108739852905,\n",
       "  0.1001066043972969],\n",
       " [0.9261475205421448,\n",
       "  0.8001077175140381,\n",
       "  0.09820841997861862,\n",
       "  -0.3343108594417572,\n",
       "  0.04634549841284752,\n",
       "  1.917747139930725,\n",
       "  1.0293104648590088,\n",
       "  -1.0091208219528198],\n",
       " [-0.1385919749736786,\n",
       "  0.7082226276397705,\n",
       "  0.6447237730026245,\n",
       "  0.7746997475624084,\n",
       "  1.6993699073791504,\n",
       "  1.3641350269317627,\n",
       "  0.12773475050926208,\n",
       "  0.5222804546356201],\n",
       " [1.4062235355377197,\n",
       "  0.03826115280389786,\n",
       "  -0.259659081697464,\n",
       "  1.4203596115112305,\n",
       "  0.379154235124588,\n",
       "  0.630120038986206,\n",
       "  -0.18724484741687775,\n",
       "  0.8353606462478638],\n",
       " [1.1043081283569336,\n",
       "  0.7791976928710938,\n",
       "  -0.19114869832992554,\n",
       "  0.11566777527332306,\n",
       "  1.0171705484390259,\n",
       "  0.3010592460632324,\n",
       "  1.053039312362671,\n",
       "  0.4721204936504364],\n",
       " [-0.2952454388141632,\n",
       "  0.6775184273719788,\n",
       "  0.48211121559143066,\n",
       "  0.978454053401947,\n",
       "  0.4149613082408905,\n",
       "  1.0622574090957642,\n",
       "  0.7691749930381775,\n",
       "  0.3581303060054779],\n",
       " [0.24229146540164948,\n",
       "  0.5553103685379028,\n",
       "  -0.8581454753875732,\n",
       "  1.2745834589004517,\n",
       "  -0.8763539791107178,\n",
       "  0.701775074005127,\n",
       "  0.10425778478384018,\n",
       "  0.2401946783065796],\n",
       " [0.8545948266983032,\n",
       "  -0.17451059818267822,\n",
       "  1.2598119974136353,\n",
       "  -0.9395562410354614,\n",
       "  1.396363615989685,\n",
       "  1.4848977327346802,\n",
       "  0.4524111747741699,\n",
       "  -0.35299187898635864],\n",
       " [0.15612494945526123,\n",
       "  -0.3523448705673218,\n",
       "  0.7272168397903442,\n",
       "  0.5041846036911011,\n",
       "  -0.15174543857574463,\n",
       "  0.015094814822077751,\n",
       "  1.5621932744979858,\n",
       "  0.8149137496948242],\n",
       " [1.0070276260375977,\n",
       "  -0.05539850518107414,\n",
       "  0.09244812279939651,\n",
       "  0.6981140971183777,\n",
       "  -0.7125719785690308,\n",
       "  0.9088519215583801,\n",
       "  0.6550078988075256,\n",
       "  -0.3604903817176819],\n",
       " [0.9765928983688354,\n",
       "  0.8855018615722656,\n",
       "  0.252682089805603,\n",
       "  0.20546267926692963,\n",
       "  0.5478521585464478,\n",
       "  1.3398078680038452,\n",
       "  0.8862090110778809,\n",
       "  1.2731441259384155],\n",
       " [0.3954082727432251,\n",
       "  1.588169813156128,\n",
       "  0.3782908618450165,\n",
       "  0.9468039274215698,\n",
       "  1.1624008417129517,\n",
       "  1.2969897985458374,\n",
       "  0.766116201877594,\n",
       "  0.20078040659427643],\n",
       " [0.9564096927642822,\n",
       "  0.5308309197425842,\n",
       "  0.11746668070554733,\n",
       "  0.10570693761110306,\n",
       "  0.4627830386161804,\n",
       "  1.0371869802474976,\n",
       "  0.20262941718101501,\n",
       "  0.3032601773738861],\n",
       " [0.41498973965644836,\n",
       "  0.028873374685645103,\n",
       "  0.18186670541763306,\n",
       "  1.2770318984985352,\n",
       "  -1.0538816452026367,\n",
       "  -0.24003906548023224,\n",
       "  -0.08495943993330002,\n",
       "  1.6389695405960083],\n",
       " [0.2652709186077118,\n",
       "  1.3261181116104126,\n",
       "  0.6337336897850037,\n",
       "  0.8555748462677002,\n",
       "  0.5652663111686707,\n",
       "  2.0087316036224365,\n",
       "  -0.2759217321872711,\n",
       "  -0.0914098247885704],\n",
       " [0.1025044396519661,\n",
       "  2.2304728031158447,\n",
       "  0.5630025863647461,\n",
       "  1.5580294132232666,\n",
       "  1.1487301588058472,\n",
       "  0.4696703255176544,\n",
       "  0.03375399857759476,\n",
       "  0.31965160369873047],\n",
       " [1.1937085390090942,\n",
       "  0.5213587880134583,\n",
       "  1.4633442163467407,\n",
       "  0.8740283846855164,\n",
       "  0.32340532541275024,\n",
       "  0.9041404724121094,\n",
       "  1.054687738418579,\n",
       "  1.65773606300354],\n",
       " [0.7057143449783325,\n",
       "  0.7046194076538086,\n",
       "  1.4697520732879639,\n",
       "  0.958602786064148,\n",
       "  -0.9306812882423401,\n",
       "  0.8857044577598572,\n",
       "  1.0786972045898438,\n",
       "  1.0379245281219482],\n",
       " [0.3645034730434418,\n",
       "  1.3554524183273315,\n",
       "  0.07599811255931854,\n",
       "  1.7383042573928833,\n",
       "  0.24458803236484528,\n",
       "  0.24323605000972748,\n",
       "  1.348103642463684,\n",
       "  1.2751131057739258],\n",
       " [1.204211711883545,\n",
       "  0.7197462320327759,\n",
       "  1.5908452272415161,\n",
       "  1.4398975372314453,\n",
       "  0.09651727974414825,\n",
       "  0.214004248380661,\n",
       "  0.6956046223640442,\n",
       "  0.883601188659668],\n",
       " [0.026900697499513626,\n",
       "  -0.11655552685260773,\n",
       "  0.42784756422042847,\n",
       "  -0.11225815117359161,\n",
       "  0.7499053478240967,\n",
       "  1.6210694313049316,\n",
       "  0.6962728500366211,\n",
       "  0.35159698128700256],\n",
       " [0.5431519746780396,\n",
       "  0.8466474413871765,\n",
       "  1.3336379528045654,\n",
       "  -0.21684546768665314,\n",
       "  0.5499700307846069,\n",
       "  0.13268263638019562,\n",
       "  0.12403228878974915,\n",
       "  0.8910006880760193],\n",
       " [0.7507150173187256,\n",
       "  1.5242588520050049,\n",
       "  1.3608232736587524,\n",
       "  0.5221432447433472,\n",
       "  -0.6173688173294067,\n",
       "  0.8822352886199951,\n",
       "  0.04046197608113289,\n",
       "  -0.35247501730918884],\n",
       " [0.4271627366542816,\n",
       "  0.8299753665924072,\n",
       "  0.4768046736717224,\n",
       "  -0.7315855026245117,\n",
       "  0.45876410603523254,\n",
       "  0.2796204090118408,\n",
       "  0.07700302451848984,\n",
       "  1.0724687576293945]]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_global_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "536c53e8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[1.2789414210506524,\n",
       " 1.1444351660195289,\n",
       " 0.8576895991058547,\n",
       " 0.9957846266913529,\n",
       " 1.1940623428541408,\n",
       " 1.0515854056410228,\n",
       " 0.9158763950610106,\n",
       " 1.041106713747888,\n",
       " 0.8094246498029879,\n",
       " 1.077270659413497,\n",
       " 1.076344731246516,\n",
       " 0.7892422253842728,\n",
       " 1.1280148443047857,\n",
       " 0.6775278841694938,\n",
       " 0.7447525572334116,\n",
       " 0.914468572791752,\n",
       " 0.7643533884975461,\n",
       " 0.8704223425926917,\n",
       " 0.9459259392632074,\n",
       " 1.0660546684803391,\n",
       " 0.8605685407104281,\n",
       " 0.885949229192707,\n",
       " 1.049452661382768,\n",
       " 0.9209729214600941,\n",
       " 0.8148851552041058,\n",
       " 1.1792700629629054,\n",
       " 0.778919993230087,\n",
       " 1.4497533176476076,\n",
       " 0.9808222186454226,\n",
       " 1.0706228041210135,\n",
       " 1.0902701643237604,\n",
       " 0.8993412355990054,\n",
       " 0.9618567859269602,\n",
       " 1.016686919203627,\n",
       " 1.0493712189130104,\n",
       " 1.208850797059952,\n",
       " 1.1282282507989185,\n",
       " 0.7458944754669243,\n",
       " 1.318647920857263,\n",
       " 1.11380432064123,\n",
       " 0.805378528115097,\n",
       " 0.9872080735358896,\n",
       " 1.1021752771341824,\n",
       " 1.0853954444694542,\n",
       " 1.1937132611504246,\n",
       " 1.237953694307133,\n",
       " 0.7308603683823252,\n",
       " 0.9978174070821546,\n",
       " 0.840081881570296,\n",
       " 0.9509966427483894,\n",
       " 1.121900993495651,\n",
       " 0.6898015883219595,\n",
       " 0.8411843318799597,\n",
       " 0.7920940682757776,\n",
       " 1.1260139861552154,\n",
       " 1.0909879397099242,\n",
       " 1.122846237519303,\n",
       " 0.6528747678577818,\n",
       " 1.0732948074922257,\n",
       " 0.5948346773495066,\n",
       " 0.7627993195503463,\n",
       " 1.202169456548781,\n",
       " 1.000030305848548,\n",
       " 1.2233434357896642,\n",
       " 0.7883680032365468,\n",
       " 0.8991554424556865,\n",
       " 1.0498277784676324,\n",
       " 0.9655879590607488,\n",
       " 1.1310002079500192,\n",
       " 0.8231447408561201,\n",
       " 1.3175819371863164,\n",
       " 1.19207907462663,\n",
       " 1.0230348692468323,\n",
       " 0.9516580467151343,\n",
       " 1.0343795565068605,\n",
       " 1.2958321629134768,\n",
       " 1.22973217149893,\n",
       " 1.0101683816248248,\n",
       " 0.771083568919205,\n",
       " 0.8177430397307591,\n",
       " 0.6004059675226182,\n",
       " 0.7502062603480097,\n",
       " 1.1597587815021897,\n",
       " 0.67391946045594,\n",
       " 1.0904954233885598,\n",
       " 0.8942784155406722,\n",
       " 0.899993024838557,\n",
       " 0.8595597584154496,\n",
       " 1.1344230628140748,\n",
       " 0.9236291990583323,\n",
       " 1.0734187237442279,\n",
       " 0.839673313350288,\n",
       " 0.9032077241057525,\n",
       " 0.7524031176712599,\n",
       " 1.156161166444619,\n",
       " 0.9921729334269938,\n",
       " 1.2661944143130506,\n",
       " 0.9944545654754146,\n",
       " 0.8428173358693422,\n",
       " 0.9858365665471526,\n",
       " 1.1550364871331906,\n",
       " 1.004638200745789,\n",
       " 0.76481666544631,\n",
       " 1.199862112551204,\n",
       " 0.9390123638360952,\n",
       " 0.9574517563159382,\n",
       " 1.2705728039894775,\n",
       " 0.7840986656729837,\n",
       " 0.7626064058854152,\n",
       " 1.3137133307455413,\n",
       " 0.8340472143164365,\n",
       " 0.9311650295053677,\n",
       " 1.183079561772522,\n",
       " 0.9402723483590193,\n",
       " 0.933778800618082,\n",
       " 1.4001454662529456,\n",
       " 1.1026053508687774,\n",
       " 1.1238843436836423,\n",
       " 0.9790759283865151,\n",
       " 0.8879207247307959,\n",
       " 0.9448859190571819,\n",
       " 0.5848119171533821,\n",
       " 0.8037870740808079,\n",
       " 1.228855393584556,\n",
       " 1.5870973632160275,\n",
       " 0.5631007711443137,\n",
       " 0.9867132299752102,\n",
       " 1.0291078351725373,\n",
       " 1.0273270862529003,\n",
       " 1.0459751205929508,\n",
       " 1.1676436429310963,\n",
       " 1.2340209961350956,\n",
       " 0.9151137188414783,\n",
       " 0.8763611379234644,\n",
       " 0.7236397750265928,\n",
       " 1.3197058543123044,\n",
       " 1.1732292987683741,\n",
       " 0.7906999224094271,\n",
       " 1.069012867151624,\n",
       " 0.8409908746475121,\n",
       " 0.8521135803535274,\n",
       " 0.9554468654438175,\n",
       " 1.1305227614741191,\n",
       " 1.2052599629478513,\n",
       " 1.3513950803024586,\n",
       " 1.2856081805427775,\n",
       " 1.0283194524364885,\n",
       " 0.9722923066129932,\n",
       " 1.19710315225399,\n",
       " 1.02005624691803,\n",
       " 0.9431069666280019,\n",
       " 1.1658421883590382,\n",
       " 1.0652443383874433,\n",
       " 0.7565169055961083,\n",
       " 0.7010806492484793,\n",
       " 0.8393338748698556,\n",
       " 0.9415658071638927,\n",
       " 0.7516547106573682,\n",
       " 1.0751205910296704,\n",
       " 0.8239083458899201,\n",
       " 0.7661116964665197,\n",
       " 1.424540090914328,\n",
       " 0.8250989881547743,\n",
       " 1.1890674799872425,\n",
       " 1.1901293277602072,\n",
       " 1.4300039219073801,\n",
       " 0.9664609988096625,\n",
       " 1.1676811417977444,\n",
       " 1.0742066760999387,\n",
       " 1.0567402629321248,\n",
       " 0.6091478329616423,\n",
       " 1.2636786586109807,\n",
       " 1.0885532236309774,\n",
       " 0.8882422592246964,\n",
       " 1.0784806322107658,\n",
       " 0.8493340314745355,\n",
       " 0.7796384178698106,\n",
       " 0.8895941286031381,\n",
       " 1.1063750582532885,\n",
       " 0.8702675848323798,\n",
       " 0.9723567845363212,\n",
       " 0.8972549856861003,\n",
       " 0.5716861053344369,\n",
       " 0.8470046986002558,\n",
       " 0.8408002943787669,\n",
       " 0.6036623615966665,\n",
       " 1.0707731257820696,\n",
       " 1.2502567006268857,\n",
       " 0.8842383417419162,\n",
       " 0.7452710513205776,\n",
       " 0.9594335958740883,\n",
       " 1.1712386782685018,\n",
       " 1.2336139380633735,\n",
       " 1.050928258047929,\n",
       " 1.0838444206983695,\n",
       " 1.2681369067831545,\n",
       " 0.9496375791297514,\n",
       " 0.907608328771044,\n",
       " 0.9503862704597822,\n",
       " 0.7008634415507119]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_bsne_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "e9aba55f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.3391261503361906,\n",
       " 0.4825641392837762,\n",
       " 0.3404793008880216,\n",
       " 0.2596116162601227,\n",
       " 0.33965131266593795,\n",
       " 0.38104282411026924,\n",
       " 0.3234525720974003,\n",
       " 0.28502643089583446,\n",
       " 0.3640787178836963,\n",
       " 0.4135935220684215,\n",
       " 0.3678376255383937,\n",
       " 0.3629521305318253,\n",
       " 0.43393352480875,\n",
       " 0.2796305596446741,\n",
       " 0.3973107638139909,\n",
       " 0.3371190357395379,\n",
       " 0.40820093317510153,\n",
       " 0.3725511833660581,\n",
       " 0.3922140164239236,\n",
       " 0.3327938990833189,\n",
       " 0.3934554020682659,\n",
       " 0.36606304750290813,\n",
       " 0.3516584996458027,\n",
       " 0.5499844444386384,\n",
       " 0.33491104179935505,\n",
       " 0.36500497571601054,\n",
       " 0.5267966671669309,\n",
       " 0.28002518009486954,\n",
       " 0.2401699098460963,\n",
       " 0.3223103010215719,\n",
       " 0.3830649209150805,\n",
       " 0.31730694576212193,\n",
       " 0.24050744972004728,\n",
       " 0.33671014854930614,\n",
       " 0.35246446044068713,\n",
       " 0.3433346243379699,\n",
       " 0.3240796010020749,\n",
       " 0.3320280435489292,\n",
       " 0.451896443269295,\n",
       " 0.28363679239935735,\n",
       " 0.3025990629463825,\n",
       " 0.35483483974444885,\n",
       " 0.2756391402220003,\n",
       " 0.35075039445042844,\n",
       " 0.34833491953149937,\n",
       " 0.21133126109175338,\n",
       " 0.3851313182722308,\n",
       " 0.30165000697536687,\n",
       " 0.34218717443443986,\n",
       " 0.3397929536202288,\n",
       " 0.35125739230162684,\n",
       " 0.33139926720682333,\n",
       " 0.31028941677232197,\n",
       " 0.30564734772780344,\n",
       " 0.25398962138575665,\n",
       " 0.43347547980596884,\n",
       " 0.4991391339763195,\n",
       " 0.37717912167607903,\n",
       " 0.39098672036680515,\n",
       " 0.2801705444564211,\n",
       " 0.22956063851709269,\n",
       " 0.37014283894726724,\n",
       " 0.2938541124175281,\n",
       " 0.3015261368110972,\n",
       " 0.30832574123372103,\n",
       " 0.27880639102486504,\n",
       " 0.3598541372806797,\n",
       " 0.30221039466031435,\n",
       " 0.2812094038619034,\n",
       " 0.29743154855711185,\n",
       " 0.3612334565295734,\n",
       " 0.43120972061700175,\n",
       " 0.4649088499329006,\n",
       " 0.2636718125121534,\n",
       " 0.3926630389988307,\n",
       " 0.3551040424076671,\n",
       " 0.28379100640932275,\n",
       " 0.43158404960573415,\n",
       " 0.26426748349120555,\n",
       " 0.35959925203938936,\n",
       " 0.3001108660439223,\n",
       " 0.32294402677779166,\n",
       " 0.3836277358468423,\n",
       " 0.33951232321472546,\n",
       " 0.4299602778375935,\n",
       " 0.2530278522208997,\n",
       " 0.3627946004992627,\n",
       " 0.3941346768643744,\n",
       " 0.2750238372512638,\n",
       " 0.36128477835706707,\n",
       " 0.3587949321711242,\n",
       " 0.3192298962183874,\n",
       " 0.3886712390298018,\n",
       " 0.3692554016147045,\n",
       " 0.29688128800709984,\n",
       " 0.3439640610118684,\n",
       " 0.28750465434333955,\n",
       " 0.36521236879629937,\n",
       " 0.304848367828101,\n",
       " 0.44283555471826064,\n",
       " 0.277433585823462,\n",
       " 0.35044011299392375,\n",
       " 0.35563747154727454,\n",
       " 0.40759605288433165,\n",
       " 0.35066366596512544,\n",
       " 0.299516785162202,\n",
       " 0.2426472418759541,\n",
       " 0.46178049347426336,\n",
       " 0.36209351685781965,\n",
       " 0.21514450383170777,\n",
       " 0.3547939892879216,\n",
       " 0.39517750425833026,\n",
       " 0.3188478039528695,\n",
       " 0.4005850385374173,\n",
       " 0.3140136344248351,\n",
       " 0.3306698825208173,\n",
       " 0.2963363459277908,\n",
       " 0.5225792966335239,\n",
       " 0.25227946385762673,\n",
       " 0.3293561558610262,\n",
       " 0.28996270541136604,\n",
       " 0.3489063034031628,\n",
       " 0.2662122203327248,\n",
       " 0.3280017634292311,\n",
       " 0.217852065009221,\n",
       " 0.3018088853958081,\n",
       " 0.19760374288867544,\n",
       " 0.3304248722557459,\n",
       " 0.37974389962646593,\n",
       " 0.3996284882017364,\n",
       " 0.39058053301911816,\n",
       " 0.2344028421159783,\n",
       " 0.29198971543113905,\n",
       " 0.31692112420374186,\n",
       " 0.31606246154383144,\n",
       " 0.33733697901738036,\n",
       " 0.4478625219592778,\n",
       " 0.35499172930098766,\n",
       " 0.2622502901170909,\n",
       " 0.3090396951524307,\n",
       " 0.5039234035289539,\n",
       " 0.2216993458911099,\n",
       " 0.4801714536535904,\n",
       " 0.31317188312280775,\n",
       " 0.3479717443404607,\n",
       " 0.4468640654583677,\n",
       " 0.3644098830538208,\n",
       " 0.37041455921838295,\n",
       " 0.3843948023555327,\n",
       " 0.34991271528262646,\n",
       " 0.3508812706677782,\n",
       " 0.3036993051178616,\n",
       " 0.3153992671573596,\n",
       " 0.26923387349965155,\n",
       " 0.3514643722168788,\n",
       " 0.3053040036444985,\n",
       " 0.31448059640842524,\n",
       " 0.3202748187253576,\n",
       " 0.39524398784353987,\n",
       " 0.24048680334786487,\n",
       " 0.34982259185211284,\n",
       " 0.2293319959002328,\n",
       " 0.37069769904190847,\n",
       " 0.41621787363717827,\n",
       " 0.23858862247796608,\n",
       " 0.23309984378142654,\n",
       " 0.3474244193050533,\n",
       " 0.3187783630977503,\n",
       " 0.35770295909468586,\n",
       " 0.3822055244026512,\n",
       " 0.2771784404757257,\n",
       " 0.3050069984484722,\n",
       " 0.25046264367420745,\n",
       " 0.26130795066599377,\n",
       " 0.4283206771860465,\n",
       " 0.39103789640659564,\n",
       " 0.4043649559505328,\n",
       " 0.4552897436227873,\n",
       " 0.3935532691674234,\n",
       " 0.3374456734452343,\n",
       " 0.3909299722004724,\n",
       " 0.34133470144466227,\n",
       " 0.3986969334047794,\n",
       " 0.3493771199245089,\n",
       " 0.43134555402770086,\n",
       " 0.32453876148998,\n",
       " 0.2748665392095416,\n",
       " 0.40838668513981075,\n",
       " 0.41970411121165413,\n",
       " 0.47491466279081496,\n",
       " 0.2985131053633265,\n",
       " 0.36801176214904574,\n",
       " 0.23456274715064987,\n",
       " 0.31213656877123397,\n",
       " 0.2531820342064749,\n",
       " 0.41258340084092904,\n",
       " 0.4940401798614522,\n",
       " 0.38207363561933155,\n",
       " 0.4365370432362368,\n",
       " 0.33958674511460474]"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epoch_avg_local"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "ab85afd6",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.9398152707144618,\n",
       " 0.6618710267357528,\n",
       " 0.517210298217833,\n",
       " 0.7361730104312301,\n",
       " 0.8544110301882029,\n",
       " 0.6705425815307535,\n",
       " 0.5924238229636103,\n",
       " 0.7560802828520536,\n",
       " 0.4453459319192916,\n",
       " 0.6636771373450756,\n",
       " 0.7085071057081223,\n",
       " 0.4262900948524475,\n",
       " 0.6940813194960356,\n",
       " 0.39789732452481985,\n",
       " 0.3474417934194207,\n",
       " 0.5773495370522141,\n",
       " 0.35615245532244444,\n",
       " 0.4978711592266336,\n",
       " 0.5537119228392839,\n",
       " 0.7332607693970203,\n",
       " 0.4671131386421621,\n",
       " 0.5198861816897988,\n",
       " 0.6977941617369652,\n",
       " 0.37098847702145576,\n",
       " 0.4799741134047508,\n",
       " 0.8142650872468948,\n",
       " 0.25212332606315613,\n",
       " 1.1697281375527382,\n",
       " 0.7406523087993264,\n",
       " 0.7483125030994415,\n",
       " 0.70720524340868,\n",
       " 0.5820342898368835,\n",
       " 0.721349336206913,\n",
       " 0.6799767706543207,\n",
       " 0.6969067584723234,\n",
       " 0.865516172721982,\n",
       " 0.8041486497968435,\n",
       " 0.4138664319179952,\n",
       " 0.8667514775879681,\n",
       " 0.8301675282418728,\n",
       " 0.5027794651687145,\n",
       " 0.6323732337914407,\n",
       " 0.826536136912182,\n",
       " 0.7346450500190258,\n",
       " 0.8453783416189253,\n",
       " 1.0266224332153797,\n",
       " 0.34572905011009425,\n",
       " 0.6961674001067877,\n",
       " 0.49789470713585615,\n",
       " 0.6112036891281605,\n",
       " 0.7706436011940241,\n",
       " 0.35840232111513615,\n",
       " 0.5308949151076376,\n",
       " 0.4864467205479741,\n",
       " 0.8720243647694588,\n",
       " 0.6575124599039555,\n",
       " 0.6237071035429835,\n",
       " 0.2756956461817026,\n",
       " 0.6823080871254206,\n",
       " 0.3146641328930855,\n",
       " 0.5332386810332537,\n",
       " 0.8320266176015139,\n",
       " 0.7061761934310198,\n",
       " 0.9218172989785671,\n",
       " 0.48004226200282574,\n",
       " 0.6203490514308214,\n",
       " 0.6899736411869526,\n",
       " 0.6633775644004345,\n",
       " 0.8497908040881157,\n",
       " 0.5257131922990084,\n",
       " 0.956348480656743,\n",
       " 0.7608693540096283,\n",
       " 0.5581260193139315,\n",
       " 0.687986234202981,\n",
       " 0.6417165175080299,\n",
       " 0.9407281205058098,\n",
       " 0.9459411650896072,\n",
       " 0.5785843320190907,\n",
       " 0.5068160854279995,\n",
       " 0.45814378769136965,\n",
       " 0.30029510147869587,\n",
       " 0.4272622335702181,\n",
       " 0.7761310456553474,\n",
       " 0.3344071372412145,\n",
       " 0.6605351455509663,\n",
       " 0.6412505633197725,\n",
       " 0.5371984243392944,\n",
       " 0.465425081551075,\n",
       " 0.8593992255628109,\n",
       " 0.5623444207012653,\n",
       " 0.7146237915731035,\n",
       " 0.5204434171319008,\n",
       " 0.5145364850759506,\n",
       " 0.3831477160565555,\n",
       " 0.8592798784375191,\n",
       " 0.6482088724151254,\n",
       " 0.9786897599697113,\n",
       " 0.6292421966791153,\n",
       " 0.5379689680412412,\n",
       " 0.5430010118288919,\n",
       " 0.8776029013097286,\n",
       " 0.6541980877518654,\n",
       " 0.40917919389903545,\n",
       " 0.792266059666872,\n",
       " 0.5883486978709698,\n",
       " 0.6579349711537361,\n",
       " 1.0279255621135235,\n",
       " 0.3223181721987203,\n",
       " 0.4005128890275955,\n",
       " 1.0985688269138336,\n",
       " 0.47925322502851486,\n",
       " 0.5359875252470374,\n",
       " 0.8642317578196526,\n",
       " 0.539687309821602,\n",
       " 0.6197651661932468,\n",
       " 1.0694755837321281,\n",
       " 0.8062690049409866,\n",
       " 0.6013050470501184,\n",
       " 0.7267964645288885,\n",
       " 0.5585645688697696,\n",
       " 0.6549232136458158,\n",
       " 0.23590561375021935,\n",
       " 0.5375748537480831,\n",
       " 0.9008536301553249,\n",
       " 1.3692452982068062,\n",
       " 0.2612918857485056,\n",
       " 0.7891094870865345,\n",
       " 0.6986829629167914,\n",
       " 0.6475831866264343,\n",
       " 0.6463466323912144,\n",
       " 0.7770631099119782,\n",
       " 0.9996181540191174,\n",
       " 0.6231240034103394,\n",
       " 0.5594400137197226,\n",
       " 0.4075773134827614,\n",
       " 0.9823688752949238,\n",
       " 0.7253667768090963,\n",
       " 0.43570819310843945,\n",
       " 0.806762577034533,\n",
       " 0.5319511794950813,\n",
       " 0.3481901768245734,\n",
       " 0.7337475195527077,\n",
       " 0.6503513078205287,\n",
       " 0.8920880798250437,\n",
       " 1.0034233359619975,\n",
       " 0.8387441150844097,\n",
       " 0.6639095693826675,\n",
       " 0.6018777473946102,\n",
       " 0.8127083498984575,\n",
       " 0.6701435316354036,\n",
       " 0.5922256959602237,\n",
       " 0.8621428832411766,\n",
       " 0.7498450712300837,\n",
       " 0.48728303209645674,\n",
       " 0.3496162770316005,\n",
       " 0.5340298712253571,\n",
       " 0.6270852107554674,\n",
       " 0.43137989193201065,\n",
       " 0.6798766031861305,\n",
       " 0.5834215425420552,\n",
       " 0.4162891046144068,\n",
       " 1.1952080950140953,\n",
       " 0.4544012891128659,\n",
       " 0.7728496063500643,\n",
       " 0.9515407052822411,\n",
       " 1.1969040781259537,\n",
       " 0.6190365795046091,\n",
       " 0.8489027786999941,\n",
       " 0.7165037170052528,\n",
       " 0.6745347385294735,\n",
       " 0.3319693924859166,\n",
       " 0.9586716601625085,\n",
       " 0.8380905799567699,\n",
       " 0.6269343085587025,\n",
       " 0.6501599550247192,\n",
       " 0.45829613506793976,\n",
       " 0.3752734619192779,\n",
       " 0.43430438498035073,\n",
       " 0.712821789085865,\n",
       " 0.5328219113871455,\n",
       " 0.5814268123358488,\n",
       " 0.5559202842414379,\n",
       " 0.17298917192965746,\n",
       " 0.4976275786757469,\n",
       " 0.409454740351066,\n",
       " 0.27912360010668635,\n",
       " 0.7959065865725279,\n",
       " 0.8418700154870749,\n",
       " 0.464534230530262,\n",
       " 0.2703563885297626,\n",
       " 0.6609204905107617,\n",
       " 0.8032269161194563,\n",
       " 0.9990511909127235,\n",
       " 0.7387916892766953,\n",
       " 0.8306623864918947,\n",
       " 0.8555535059422255,\n",
       " 0.45559739926829934,\n",
       " 0.5255346931517124,\n",
       " 0.5138492272235453,\n",
       " 0.36127669643610716]"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epoch_avg_global"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1e39558c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "daeba5bd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1cf72980",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7b5196b4",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tsne01",
   "language": "python",
   "name": "tsne01"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
