{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "6d9d0ae9",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/data/home/sczc619/run/LML/anaconda3/envs/tsne/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
      "  from .autonotebook import tqdm as notebook_tqdm\n"
     ]
    }
   ],
   "source": [
    "import pickle\n",
    "import os\n",
    "import random as rd\n",
    "import numpy as np\n",
    "import copy\n",
    "import copy as cp\n",
    "import dgl\n",
    "from collections import defaultdict\n",
    "import matplotlib.pyplot as plt\n",
    "import time\n",
    "import scipy.sparse as sp\n",
    "from scipy.io import loadmat\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import torch.nn as nn\n",
    "from torch.nn import TransformerEncoder, TransformerEncoderLayer\n",
    "from torch_geometric.utils import to_dense_adj, subgraph\n",
    "from torch.utils.tensorboard import SummaryWriter\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.manifold import TSNE\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import f1_score, accuracy_score, recall_score, roc_auc_score, average_precision_score, \\\n",
    "    confusion_matrix\n",
    "\n",
    "# Relation key -> pickle file holding that relation's adjacency lists\n",
    "# (loaded and converted to edge indices in load_data).\n",
    "filelist = {\n",
    "    'amz_upu': 'amz_upu_adjlists.pickle',\n",
    "    'amz_usu': 'amz_usu_adjlists.pickle',\n",
    "    'amz_uvu': 'amz_uvu_adjlists.pickle',\n",
    "    'yelp_rsr': 'yelp_rsr_adjlists.pickle',\n",
    "    'yelp_rtr': 'yelp_rtr_adjlists.pickle',\n",
    "    'yelp_rur': 'yelp_rur_adjlists.pickle'\n",
    "}\n",
    "\n",
    "# Relation key -> filename prefix of its pre-computed matrix pickles;\n",
    "# load_data reads '<prefix><i>.pkl' for i = 1..k.\n",
    "file_matrix_prefix = {\n",
    "    'amz_upu': 'amazon_upu_matrix_',\n",
    "    'amz_usu': 'amazon_usu_matrix_',\n",
    "    'amz_uvu': 'amazon_uvu_matrix_',\n",
    "    'yelp_rsr': 'yelpnet_rsr_matrix_decompision_',\n",
    "    'yelp_rtr': 'yelpnet_rtr_matrix_decompision_',\n",
    "    'yelp_rur': 'yelpnet_rur_matrix_decompision_'\n",
    "}\n",
    "\n",
    "\n",
    "def create_node_subgraph(node_idx, feat_data, edge_indexs, device):\n",
    "    \"\"\"\n",
    "    为单个节点创建一阶邻居子图（所有邻居）\n",
    "    \"\"\"\n",
    "    neighbors = set()\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        # 找出以中心点为起点的边的终点\n",
    "        rel_neighbors = edge_index[1][edge_index[0] == node_idx].tolist()\n",
    "        neighbors.update(rel_neighbors)\n",
    "\n",
    "    # 移除中心节点自身\n",
    "    neighbors.discard(node_idx)\n",
    "    neighbors = list(neighbors)\n",
    "\n",
    "    # 如果邻居太多，进行随机采样截取\n",
    "    sample_size = 399\n",
    "    if len(neighbors) > sample_size:\n",
    "        neighbors = np.random.choice(neighbors, size=sample_size, replace=False).tolist()\n",
    "\n",
    "    # 构建子图节点列表，确保中心节点是第一个\n",
    "    sub_nodes = [node_idx] + [n for n in neighbors if n != node_idx]\n",
    "\n",
    "    # 构建子图边列表\n",
    "    sub_edge_index = []\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        mask = np.isin(edge_index[0], sub_nodes) & np.isin(edge_index[1], sub_nodes)\n",
    "        local_edges = edge_index[:, mask]\n",
    "\n",
    "        # 创建节点映射\n",
    "        node_map = {n: i for i, n in enumerate(sub_nodes)}\n",
    "\n",
    "        # 将全局索引映射到局部索引\n",
    "        if len(local_edges) > 0 and local_edges.size > 0:\n",
    "            src_nodes = [node_map[src] for src in local_edges[0]]\n",
    "            dst_nodes = [node_map[dst] for dst in local_edges[1]]\n",
    "            edge_tensor = torch.tensor([src_nodes, dst_nodes], dtype=torch.long)\n",
    "        else:\n",
    "            # 添加自环确保图不为空\n",
    "            edge_tensor = torch.tensor([[0], [0]], dtype=torch.long)\n",
    "\n",
    "        sub_edge_index.append(edge_tensor.to(device))\n",
    "\n",
    "    # 创建子图数据\n",
    "    subgraph = {\n",
    "        'features': feat_data[sub_nodes].clone(),\n",
    "        'edges': sub_edge_index,\n",
    "        'global_idx': sub_nodes\n",
    "    }\n",
    "\n",
    "    return subgraph\n",
    "\n",
    "\n",
    "def dict_to_edge_index(edge_dict):\n",
    "    source_nodes = []\n",
    "    target_nodes = []\n",
    "    for src, targets in edge_dict.items():\n",
    "        for target in targets:\n",
    "            source_nodes.append(src)\n",
    "            target_nodes.append(target)\n",
    "    edge_index = [source_nodes, target_nodes]\n",
    "    return torch.LongTensor(edge_index)\n",
    "\n",
    "\n",
    "def numpy_array_to_edge_index(np_array):\n",
    "    assert np_array.ndim == 2 and np_array.shape[0] == np_array.shape[1], \"Input must be a square matrix.\"\n",
    "    rows, cols = np.nonzero(np_array)\n",
    "    edge_index = np.vstack((rows, cols))\n",
    "    edge_index_tensor = torch.from_numpy(edge_index).long()\n",
    "    return edge_index_tensor\n",
    "\n",
    "\n",
    "def _load_relation(pickle_path, matrix_prefix, k):\n",
    "    \"\"\"Load one relation: its adjacency-list pickle plus its k matrix pickles.\n",
    "\n",
    "    :param pickle_path: path of the pickled adjacency-list dict\n",
    "    :param matrix_prefix: filename prefix; matrices live in '<prefix><i>.pkl'\n",
    "    :param k: number of matrix files to load (i = 1..k)\n",
    "    :return: (edge_index, [edge_index_1, ..., edge_index_k])\n",
    "    \"\"\"\n",
    "    # NOTE: pickle.load can execute arbitrary code; only load trusted files.\n",
    "    with open(pickle_path, 'rb') as file:\n",
    "        adj_lists = pickle.load(file)\n",
    "    edge_index = dict_to_edge_index(adj_lists)\n",
    "    trees = []\n",
    "    for i in range(1, k + 1):\n",
    "        file_name = '{}{}.pkl'.format(matrix_prefix, i)\n",
    "        with open(file_name, 'rb') as file:\n",
    "            tree = pickle.load(file)\n",
    "        trees.append(numpy_array_to_edge_index(tree))\n",
    "    return edge_index, trees\n",
    "\n",
    "\n",
    "def load_data(data, k=2, prefix=''):\n",
    "    \"\"\"Load a fraud dataset ('yelp' or 'amazon') with its three relations.\n",
    "\n",
    "    Replaces six copy-pasted load sequences with a single helper; the\n",
    "    redundant file.close() calls after each `with` block are gone too.\n",
    "\n",
    "    :param data: dataset name, 'yelp' or 'amazon'\n",
    "    :param k: number of matrix files loaded per relation\n",
    "    :param prefix: directory containing the .mat and pickle files\n",
    "    :return: ([[edge_index, trees] for each of 3 relations], feat_data, labels),\n",
    "             or None for an unknown dataset name (same as the original\n",
    "             falling off the end of the function).\n",
    "    \"\"\"\n",
    "    pickle_file = {}\n",
    "    matrix_prefix = {}\n",
    "    for key in filelist:\n",
    "        pickle_file[key] = os.path.join(prefix, filelist[key])\n",
    "        matrix_prefix[key] = os.path.join(prefix, file_matrix_prefix[key])\n",
    "\n",
    "    if data == 'yelp':\n",
    "        mat_name = 'YelpChi.mat'\n",
    "        rel_keys = ['yelp_rur', 'yelp_rtr', 'yelp_rsr']\n",
    "    elif data == 'amazon':\n",
    "        mat_name = 'Amazon.mat'\n",
    "        rel_keys = ['amz_upu', 'amz_usu', 'amz_uvu']\n",
    "    else:\n",
    "        return None  # unknown dataset: preserve the original implicit None\n",
    "\n",
    "    data_file = loadmat(os.path.join(prefix, mat_name))\n",
    "    labels = data_file['label'].flatten()\n",
    "    feat_data = data_file['features'].todense().A\n",
    "    # Relation order matches the original: rur/rtr/rsr resp. upu/usu/uvu.\n",
    "    relations = [list(_load_relation(pickle_file[key], matrix_prefix[key], k))\n",
    "                 for key in rel_keys]\n",
    "    return relations, feat_data, labels\n",
    "\n",
    "\n",
    "def Visualization(labels, embedding, prefix):\n",
    "    \"\"\"t-SNE scatter plot of a class-balanced embedding sample, saved as HOGRL.png.\"\"\"\n",
    "    pos_idx, neg_idx = pos_neg_split(list(range(len(labels))), labels)\n",
    "    balanced_idx = undersample(pos_idx, neg_idx, scale=1)\n",
    "    tsne = TSNE(n_components=2, random_state=43)\n",
    "    balanced_idx = np.array(balanced_idx)\n",
    "    # Resample (with replacement) to a fixed plotting budget of 5000 points.\n",
    "    balanced_idx = np.random.choice(balanced_idx, size=5000, replace=True)\n",
    "    points = embedding[balanced_idx]\n",
    "    point_labels = labels[balanced_idx]\n",
    "\n",
    "    reduced = tsne.fit_transform(points)\n",
    "\n",
    "    # Rescale both t-SNE axes into [0, 1] for a fixed frame.\n",
    "    scaled = MinMaxScaler(feature_range=(0, 1)).fit_transform(reduced)\n",
    "    print(scaled.shape)\n",
    "\n",
    "    plt.figure(figsize=(8, 8))\n",
    "\n",
    "    plt.scatter(scaled[point_labels == 0, 0], scaled[point_labels == 0, 1], c='#14517C', label='Label 0', s=3)\n",
    "\n",
    "    plt.scatter(scaled[point_labels == 1, 0], scaled[point_labels == 1, 1], c='#FA7F6F', label='Label 1', s=3)\n",
    "\n",
    "    # Hide the full frame and all tick marks.\n",
    "    ax = plt.gca()\n",
    "    for side in ('top', 'right', 'left', 'bottom'):\n",
    "        ax.spines[side].set_visible(False)\n",
    "\n",
    "    plt.xticks([])\n",
    "    plt.yticks([])\n",
    "\n",
    "    plt.xlim(0, 1)\n",
    "    plt.ylim(0, 1)\n",
    "    plt.savefig(os.path.join(prefix, 'HOGRL.png'))\n",
    "    plt.show()\n",
    "\n",
    "\n",
    "def normalize(mx):\n",
    "    rowsum = np.array(mx.sum(1)) + 0.01\n",
    "    r_inv = np.power(rowsum, -1).flatten()\n",
    "    r_inv[np.isinf(r_inv)] = 0.\n",
    "    r_mat_inv = sp.diags(r_inv)\n",
    "    mx = r_mat_inv.dot(mx)\n",
    "    return mx\n",
    "\n",
    "\n",
    "def pos_neg_split(nodes, labels):\n",
    "    # 正负样本分割\n",
    "    pos_nodes = []\n",
    "    neg_nodes = cp.deepcopy(nodes)\n",
    "    aux_nodes = cp.deepcopy(nodes)\n",
    "    for idx, label in enumerate(labels):\n",
    "        if label == 1:\n",
    "            pos_nodes.append(aux_nodes[idx])\n",
    "            neg_nodes.remove(aux_nodes[idx])\n",
    "\n",
    "    return pos_nodes, neg_nodes\n",
    "\n",
    "\n",
    "def undersample(pos_nodes, neg_nodes, scale=1):\n",
    "    # 对负样本进行下采样，平衡正负样本数量\n",
    "    aux_nodes = cp.deepcopy(neg_nodes)\n",
    "    aux_nodes = rd.sample(aux_nodes, k=int(len(pos_nodes) * scale))\n",
    "    batch_nodes = pos_nodes + aux_nodes\n",
    "\n",
    "    return batch_nodes\n",
    "\n",
    "\n",
    "def calculate_g_mean(y_true, y_pred):\n",
    "    \"\"\"Geometric mean of per-class recall (sensitivity) values.\"\"\"\n",
    "    cm = confusion_matrix(y_true, y_pred)\n",
    "    sensitivities = []\n",
    "    for i, row in enumerate(cm):\n",
    "        support = row.sum()  # TP + FN for class i\n",
    "        sensitivities.append(cm[i, i] / support if support != 0 else 0)\n",
    "    return np.prod(sensitivities) ** (1 / len(sensitivities))\n",
    "\n",
    "\n",
    "def iterate_batches(indices, batch_size, shuffle=True):\n",
    "    \"\"\"\n",
    "        将索引列表划分为指定大小的批次\n",
    "        :param indices: 样本索引列表\n",
    "        :param batch_size: 每个批次的大小\n",
    "        :param shuffle: 是否打乱顺序\n",
    "        :return: 生成批次索引的迭代器\n",
    "    \"\"\"\n",
    "    if shuffle:\n",
    "        rd.shuffle(indices)\n",
    "    for i in range(0, len(indices), batch_size):\n",
    "        yield indices[i:i + batch_size]\n",
    "\n",
    "\n",
    "def test(idx_eval, y_eval, model, feat_data, edge_indexs, device, batch_size=64):\n",
    "    \"\"\"Evaluate `model` on the nodes in `idx_eval`.\n",
    "\n",
    "    :param idx_eval: node indices to evaluate, processed in order\n",
    "    :param y_eval: labels aligned with idx_eval (y_eval[j] labels idx_eval[j])\n",
    "    :param model: model mapping a list of subgraph dicts to (logits, features)\n",
    "    :param feat_data: global node feature tensor\n",
    "    :param edge_indexs: per-relation edge-index structures\n",
    "    :param device: device used when building subgraphs\n",
    "    :param batch_size: number of center nodes per forward pass\n",
    "    :return: (AUC, average precision, macro-F1, g-mean)\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    all_probs = []\n",
    "    all_labels = []\n",
    "\n",
    "    # Fix: the original indexed an undefined global `labels` and ignored the\n",
    "    # y_eval parameter entirely. Batches preserve order (shuffle=False), so\n",
    "    # labels are taken from y_eval positionally — assumes y_eval is aligned\n",
    "    # with idx_eval (TODO confirm against callers).\n",
    "    offset = 0\n",
    "    for batch_centers in iterate_batches(idx_eval, batch_size, shuffle=False):\n",
    "        # Build one subgraph per center node in this batch.\n",
    "        subgraph_data = [create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "                         for xi in batch_centers]\n",
    "\n",
    "        with torch.no_grad():\n",
    "            center_logits, _ = model(subgraph_data)  # [B, 2]\n",
    "            probs = torch.softmax(center_logits, dim=-1)[:, 1]  # positive-class probability\n",
    "            all_probs.extend(probs.cpu().numpy())\n",
    "\n",
    "        all_labels.extend(y_eval[offset:offset + len(batch_centers)])\n",
    "        offset += len(batch_centers)\n",
    "\n",
    "    # Threshold at 0.5 for the label-based metrics.\n",
    "    auc_score = roc_auc_score(all_labels, all_probs)\n",
    "    ap_score = average_precision_score(all_labels, all_probs)\n",
    "    pred_labels = (np.array(all_probs) >= 0.5).astype(int)\n",
    "    f1 = f1_score(all_labels, pred_labels, average='macro')\n",
    "    g_mean = calculate_g_mean(all_labels, pred_labels)\n",
    "\n",
    "    return auc_score, ap_score, f1, g_mean\n",
    "\n",
    "\n",
    "class BSNE_Transformer(nn.Module):\n",
    "    \"\"\"Multi-relation Transformer over per-node subgraphs.\n",
    "\n",
    "    Each relation gets its own TransformerEncoder whose attention is\n",
    "    restricted to that relation's edges (plus self-loops) via an additive\n",
    "    attention mask. The per-relation encodings of the center node\n",
    "    (position 0 of every subgraph) are concatenated and classified.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, in_feat, out_feat, relation_nums=3, d_model=256,\n",
    "                 nhead=8, num_layers=3, dim_feedforward=256,\n",
    "                 drop_rate=0.5):\n",
    "        \"\"\"\n",
    "        :param in_feat: input node-feature dimension\n",
    "        :param out_feat: number of output classes\n",
    "        :param relation_nums: number of relations (one encoder each)\n",
    "        :param d_model: Transformer hidden size\n",
    "        :param nhead: attention heads per encoder layer\n",
    "        :param num_layers: encoder layers per relation\n",
    "        :param dim_feedforward: feed-forward size inside each encoder layer\n",
    "        :param drop_rate: dropout used in encoder layers and the classifier\n",
    "        \"\"\"\n",
    "        super().__init__()\n",
    "        self.relation_nums = relation_nums\n",
    "        self.d_model = d_model\n",
    "        self.nhead = nhead\n",
    "\n",
    "        # Project raw node features into the Transformer's hidden space.\n",
    "        self.feature_proj = nn.Sequential(\n",
    "            nn.Linear(in_feat, d_model),\n",
    "            nn.LayerNorm(d_model),\n",
    "            nn.ReLU()\n",
    "        )\n",
    "        self.norm = nn.LayerNorm(d_model)\n",
    "\n",
    "        # One independent encoder stack per relation type.\n",
    "        self.relation_encoders = nn.ModuleList([\n",
    "            TransformerEncoder(\n",
    "                TransformerEncoderLayer(\n",
    "                    d_model=d_model,\n",
    "                    nhead=nhead,\n",
    "                    dim_feedforward=dim_feedforward,\n",
    "                    dropout=drop_rate,\n",
    "                    batch_first=True\n",
    "                ),\n",
    "                num_layers=num_layers\n",
    "            ) for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        # Classifier over the concatenated per-relation center features.\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(relation_nums * d_model, 512),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(drop_rate),\n",
    "            nn.Linear(512, out_feat)\n",
    "        )\n",
    "        # Xavier init for all weight matrices (biases/norms keep defaults).\n",
    "        for p in self.parameters():\n",
    "            if p.dim() > 1:\n",
    "                nn.init.xavier_uniform_(p)\n",
    "\n",
    "    #         self.node_feature_extractor = nn.Sequential(\n",
    "    #             nn.Linear(relation_nums * d_model, d_model),\n",
    "    #             nn.ReLU(),\n",
    "    #             nn.LayerNorm(d_model)\n",
    "    #         )\n",
    "\n",
    "    def forward(self, subgraph_batch):\n",
    "        \"\"\"Run the model on a list of subgraph dicts.\n",
    "\n",
    "        :param subgraph_batch: list of dicts with 'features' (node features,\n",
    "            center node at position 0) and 'edges' (per-relation [2, E]\n",
    "            tensors in local indices) — the output of create_node_subgraph\n",
    "        :return: (center_logits [B, out_feat] as log-probabilities,\n",
    "                  center_features [B, relation_nums * d_model])\n",
    "        \"\"\"\n",
    "        # Subgraphs have different node counts, so process them one at a time.\n",
    "        center_logits_list = []\n",
    "        center_features_list = []\n",
    "\n",
    "        for sg in subgraph_batch:\n",
    "            # Project this subgraph's node features: [1, num_nodes, d_model].\n",
    "            features = self.feature_proj(sg['features'].unsqueeze(0))  # [1, num_nodes, d_model]\n",
    "            features = self.norm(features)\n",
    "\n",
    "            # Encode each relation type with its own encoder.\n",
    "            rel_outputs = []\n",
    "            num_nodes = features.size(1)\n",
    "\n",
    "            for rel_idx in range(self.relation_nums):\n",
    "                # Dense adjacency matrix of the current relation.\n",
    "                edge_index = sg['edges'][rel_idx]\n",
    "                adj = torch.zeros(num_nodes, num_nodes,\n",
    "                                  dtype=torch.float, device=features.device)\n",
    "\n",
    "                if edge_index.size(1) > 0:\n",
    "                    src, dst = edge_index\n",
    "                    adj[src, dst] = 1.0\n",
    "\n",
    "                # Self-loops keep every row attendable (no all -inf rows).\n",
    "                adj[range(num_nodes), range(num_nodes)] = 1.0\n",
    "\n",
    "                # Additive attention mask: 0 where an edge exists, -inf elsewhere.\n",
    "                adj_mask = adj.masked_fill(adj == 0.0, float('-inf'))\n",
    "                adj_mask = adj_mask.masked_fill(adj == 1.0, 0.0)\n",
    "\n",
    "                # Expand to one mask per attention head: [nhead, N, N].\n",
    "                adj_mask = adj_mask.unsqueeze(0).unsqueeze(0)  # [1, 1, num_nodes, num_nodes]\n",
    "                adj_mask = adj_mask.expand(1, self.nhead, num_nodes, num_nodes)\n",
    "                adj_mask = adj_mask.reshape(-1, num_nodes, num_nodes)\n",
    "\n",
    "                # Relation-specific Transformer encoding.\n",
    "                encoder_output = self.relation_encoders[rel_idx](\n",
    "                    src=features,\n",
    "                    mask=adj_mask\n",
    "                )\n",
    "                rel_outputs.append(encoder_output)\n",
    "\n",
    "            # Concatenate the per-relation node encodings.\n",
    "            combined = torch.cat(rel_outputs, dim=-1)  # [1, num_nodes, rel*d_model]\n",
    "\n",
    "            # Center-node features (sub-node 0 is the center by construction).\n",
    "            center_features = combined[:, 0, :]  # [1, rel*d_model]\n",
    "            center_logits = self.classifier(center_features)  # [1, out_feat]\n",
    "            center_logits = F.log_softmax(center_logits, dim=-1)\n",
    "\n",
    "            center_logits_list.append(center_logits)\n",
    "            center_features_list.append(center_features)\n",
    "\n",
    "        # Stack per-subgraph results into a batch.\n",
    "        center_logits = torch.cat(center_logits_list, dim=0)\n",
    "        center_features = torch.cat(center_features_list, dim=0)\n",
    "\n",
    "        return center_logits, center_features\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5ba01b16",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "local_loss: 0.31578776893848676,global_loss: 0.1519259810447693\n",
      "local_loss: 0.2957234296526537,global_loss: 1.2577629089355469\n",
      "local_loss: 0.3326569083479613,global_loss: 1.029839277267456\n",
      "local_loss: 0.36208775182398284,global_loss: 0.7231440544128418\n",
      "local_loss: 0.35633630798731936,global_loss: -0.04313505440950394\n",
      "local_loss: 0.590442418523669,global_loss: 0.6568618416786194\n",
      "local_loss: 0.45620456422310995,global_loss: 0.6402139663696289\n",
      "local_loss: 0.34232755691537897,global_loss: 1.4015612602233887\n",
      "feature_proj.0.weight -0.002668848494067788 0.15138716995716095\n",
      "feature_proj.0.bias 0.0040625231340527534 0.11707000434398651\n",
      "feature_proj.1.weight 0.999995231628418 4.9331407353747636e-05\n",
      "feature_proj.1.bias -3.124737304460723e-06 4.950085349264555e-05\n",
      "norm.weight 1.0000015497207642 5.0352206017123535e-05\n",
      "norm.bias -3.1336076062871143e-06 5.0287471822230145e-05\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.0008238173904828727 0.08855343610048294\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -5.1161150622647256e-06 4.084391184733249e-05\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0006303893169388175 0.12593775987625122\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias -3.1249883249984123e-06 5.029662497690879e-05\n",
      "relation_encoders.0.layers.0.linear1.weight 0.0006876029074192047 0.07898281514644623\n",
      "relation_encoders.0.layers.0.linear1.bias 0.00411180080845952 0.0716613307595253\n",
      "relation_encoders.0.layers.0.linear2.weight 0.0003667499404400587 0.07986607402563095\n",
      "relation_encoders.0.layers.0.linear2.bias -0.00037201005034148693 0.038840100169181824\n",
      "relation_encoders.0.layers.0.norm1.weight 1.0000015497207642 5.035690264776349e-05\n",
      "relation_encoders.0.layers.0.norm1.bias -7.812566764187068e-06 4.977611388312653e-05\n",
      "relation_encoders.0.layers.0.norm2.weight 1.0 5.0380174798192456e-05\n",
      "relation_encoders.0.layers.0.norm2.bias -1.5624877960362937e-06 5.037056689616293e-05\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight 0.00044880411587655544 0.08807627856731415\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 2.171979303966509e-06 4.116454510949552e-05\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight 0.0011571741197258234 0.12522174417972565\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -4.687403816205915e-06 5.0173115596408024e-05\n",
      "relation_encoders.0.layers.1.linear1.weight 0.0004448324616532773 0.07878211885690689\n",
      "relation_encoders.0.layers.1.linear1.bias 0.0041207848116755486 0.07165682315826416\n",
      "relation_encoders.0.layers.1.linear2.weight 0.00045277804019860923 0.07925903052091599\n",
      "relation_encoders.0.layers.1.linear2.bias -0.0003798226825892925 0.03884696960449219\n",
      "relation_encoders.0.layers.1.norm1.weight 1.0 5.038392555434257e-05\n",
      "relation_encoders.0.layers.1.norm1.bias -6.250093974813353e-06 4.9999765906250104e-05\n",
      "relation_encoders.0.layers.1.norm2.weight 1.0000015497207642 5.035501453676261e-05\n",
      "relation_encoders.0.layers.1.norm2.bias -6.230493454495445e-09 5.038889139541425e-05\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -0.0003089577076025307 0.08884979039430618\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias -1.8122191249858588e-06 4.109395376872271e-05\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight -0.0029866823460906744 0.12495901435613632\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -1.5624880234099692e-06 5.037055962020531e-05\n",
      "relation_encoders.0.layers.2.linear1.weight 0.00031562172807753086 0.07892996817827225\n",
      "relation_encoders.0.layers.2.linear1.bias 0.00412039365619421 0.07166231423616409\n",
      "relation_encoders.0.layers.2.linear2.weight -0.0006127426167950034 0.07939323037862778\n",
      "relation_encoders.0.layers.2.linear2.bias -0.0003720098175108433 0.03884036839008331\n",
      "relation_encoders.0.layers.2.norm1.weight 1.0000077486038208 4.9763595598051324e-05\n",
      "relation_encoders.0.layers.2.norm1.bias 6.250052592804423e-06 4.999963493901305e-05\n",
      "relation_encoders.0.layers.2.norm2.weight 0.999985933303833 4.835678555537015e-05\n",
      "relation_encoders.0.layers.2.norm2.bias -1.1436575277912198e-06 2.5270921469200402e-05\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight -0.0007503621163778007 0.08870630711317062\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 4.223899850330781e-06 4.077929042978212e-05\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0009403240401297808 0.12586697936058044\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias -4.687483851739671e-06 5.0173213821835816e-05\n",
      "relation_encoders.1.layers.0.linear1.weight 0.0005055658984929323 0.0790960043668747\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0016253957292065024 0.07252722978591919\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00018739556253422052 0.0790252760052681\n",
      "relation_encoders.1.layers.0.linear2.bias -0.004547403659671545 0.035641271620988846\n",
      "relation_encoders.1.layers.0.norm1.weight 0.9999999403953552 5.037924711359665e-05\n",
      "relation_encoders.1.layers.0.norm1.bias -6.249964371818351e-06 4.999985685572028e-05\n",
      "relation_encoders.1.layers.0.norm2.weight 0.9999983906745911 5.0356895371805876e-05\n",
      "relation_encoders.1.layers.0.norm2.bias -4.687500677391654e-06 5.017322473577224e-05\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0008828968275338411 0.08809544891119003\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias 3.0355408853210974e-07 4.101271406398155e-05\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight -0.0014939194079488516 0.12502293288707733\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias 4.67772088086349e-06 5.016428985982202e-05\n",
      "relation_encoders.1.layers.1.linear1.weight -0.00047514092875644565 0.079120934009552\n",
      "relation_encoders.1.layers.1.linear1.bias 0.0016261768760159612 0.07252753525972366\n",
      "relation_encoders.1.layers.1.linear2.weight -0.0006588182295672596 0.07914432883262634\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0045411535538733006 0.03563057631254196\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999936819076538 4.9984653742285445e-05\n",
      "relation_encoders.1.layers.1.norm1.bias 4.687492946686689e-06 5.017311195842922e-05\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0000046491622925 5.0155213102698326e-05\n",
      "relation_encoders.1.layers.1.norm2.bias 3.124957402178552e-06 5.0296628614887595e-05\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight -0.0012017728295177221 0.08866920322179794\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 2.6013537990365876e-06 4.0879509469959885e-05\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight 8.813804015517235e-07 0.12473064661026001\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias -4.687572982220445e-06 5.0173126510344446e-05\n",
      "relation_encoders.1.layers.2.linear1.weight 0.00019358414283487946 0.0785611942410469\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0016238337848335505 0.07252924144268036\n",
      "relation_encoders.1.layers.2.linear2.weight -9.935331763699651e-05 0.07862865179777145\n",
      "relation_encoders.1.layers.2.linear2.bias -0.0045411535538733006 0.03563908115029335\n",
      "relation_encoders.1.layers.2.norm1.weight 0.9999969005584717 5.0289811042603105e-05\n",
      "relation_encoders.1.layers.2.norm1.bias -4.687337423092686e-06 5.0173002819065005e-05\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9999796748161316 4.6041452151257545e-05\n",
      "relation_encoders.1.layers.2.norm2.bias -6.85589247950702e-06 2.405357918178197e-05\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.00027920628781430423 0.0891411229968071\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 9.142626140601351e-07 4.108548819203861e-05\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.002378573175519705 0.12615123391151428\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias -3.125001057924237e-06 5.029661406297237e-05\n",
      "relation_encoders.2.layers.0.linear1.weight 0.00045371410669758916 0.07863827049732208\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0024154288694262505 0.07242870330810547\n",
      "relation_encoders.2.layers.0.linear2.weight -0.000899971928447485 0.07878705859184265\n",
      "relation_encoders.2.layers.0.linear2.bias 0.0017670525703579187 0.03728872910141945\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0000030994415283 5.030680404161103e-05\n",
      "relation_encoders.2.layers.0.norm1.bias -3.1249849143932806e-06 5.02966322528664e-05\n",
      "relation_encoders.2.layers.0.norm2.weight 0.9999969005584717 5.0294514949200675e-05\n",
      "relation_encoders.2.layers.0.norm2.bias 4.687505224865163e-06 5.017311923438683e-05\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0005081089911982417 0.08868993073701859\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 4.5561318984255195e-07 4.102887396584265e-05\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.003999773412942886 0.12366087734699249\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias 2.637534635141492e-11 5.0395112339174375e-05\n",
      "relation_encoders.2.layers.1.linear1.weight -5.256402073428035e-05 0.07916244864463806\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0024189434479922056 0.07242821156978607\n",
      "relation_encoders.2.layers.1.linear2.weight -3.553926944732666e-06 0.07928280532360077\n",
      "relation_encoders.2.layers.1.linear2.bias 0.0017748655518516898 0.037290122359991074\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999984502792358 5.036817674408667e-05\n",
      "relation_encoders.2.layers.1.norm1.bias 4.687147793447366e-06 5.017281728214584e-05\n",
      "relation_encoders.2.layers.1.norm2.weight 0.9999984502792358 5.0363465561531484e-05\n",
      "relation_encoders.2.layers.1.norm2.bias -6.249915713851806e-06 4.999986049369909e-05\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0016668213065713644 0.08936823904514313\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 4.59148213849403e-06 4.075307515449822e-05\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight -0.0006114120478741825 0.12484939396381378\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias 1.5625173546141014e-06 5.037047594669275e-05\n",
      "relation_encoders.2.layers.2.linear1.weight -0.00016097120533231646 0.07914784550666809\n",
      "relation_encoders.2.layers.2.linear1.bias -0.002418162301182747 0.07242590188980103\n",
      "relation_encoders.2.layers.2.linear2.weight 0.00014494523929897696 0.07916872203350067\n",
      "relation_encoders.2.layers.2.linear2.bias 0.0017654894618317485 0.03728716820478439\n",
      "relation_encoders.2.layers.2.norm1.weight 0.9999969005584717 5.0293576350668445e-05\n",
      "relation_encoders.2.layers.2.norm1.bias 2.432898327242583e-11 5.03950723214075e-05\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9999890327453613 4.9166028475156054e-05\n",
      "relation_encoders.2.layers.2.norm2.bias 2.499506877029489e-07 2.838859290932305e-05\n",
      "classifier.0.weight 0.00013326025509741157 0.05322743579745293\n",
      "classifier.0.bias -0.00010216474765911698 0.041867803782224655\n",
      "classifier.3.weight 0.0014577601104974747 0.062292683869600296\n",
      "classifier.3.bias -0.034972481429576874 0.009602691046893597\n",
      "Pretrain Epoch: 000, BSNE Loss: 1.1087\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "local_loss: 0.5577338037897981,global_loss: 0.37270569801330566\n",
      "local_loss: 0.44930615986719985,global_loss: 0.339718222618103\n",
      "local_loss: 0.23953702560053594,global_loss: 1.1520227193832397\n",
      "local_loss: 0.37079996814621746,global_loss: 1.4367971420288086\n",
      "local_loss: 0.31426217635679854,global_loss: 0.5276272296905518\n",
      "local_loss: 0.3154168610185258,global_loss: 0.7358899116516113\n",
      "local_loss: 0.34408642162948544,global_loss: 0.7522270083427429\n",
      "local_loss: 0.4982192493443492,global_loss: 0.9403790235519409\n",
      "feature_proj.0.weight -0.002677005250006914 0.15138792991638184\n",
      "feature_proj.0.bias 0.004052502103149891 0.11708202213048935\n",
      "feature_proj.1.weight 0.9999897480010986 7.373813423328102e-05\n",
      "feature_proj.1.bias -5.949672413407825e-06 7.666154851904139e-05\n",
      "norm.weight 1.0000022649765015 7.589149754494429e-05\n",
      "norm.bias -3.3556350444996497e-06 7.348821236519143e-05\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_weight -0.00082384143024683 0.08855338394641876\n",
      "relation_encoders.0.layers.0.self_attn.in_proj_bias -6.45696582068922e-06 6.09648777754046e-05\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.weight -0.0006312043406069279 0.12593744695186615\n",
      "relation_encoders.0.layers.0.self_attn.out_proj.bias -9.522793334326707e-06 7.021422061370686e-05\n",
      "relation_encoders.0.layers.0.linear1.weight 0.0006876441184431314 0.07898294925689697\n",
      "relation_encoders.0.layers.0.linear1.bias 0.004110412206500769 0.07166211307048798\n",
      "relation_encoders.0.layers.0.linear2.weight 0.00036730850115418434 0.07986560463905334\n",
      "relation_encoders.0.layers.0.linear2.bias -0.0003708649892359972 0.038833487778902054\n",
      "relation_encoders.0.layers.0.norm1.weight 1.0000007152557373 7.626372098457068e-05\n",
      "relation_encoders.0.layers.0.norm1.bias -1.8186909073847346e-05 7.457556785084307e-05\n",
      "relation_encoders.0.layers.0.norm2.weight 1.000000238418579 7.650360930711031e-05\n",
      "relation_encoders.0.layers.0.norm2.bias -9.922241588355973e-07 7.908229599706829e-05\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_weight 0.00044879221240989864 0.08807665854692459\n",
      "relation_encoders.0.layers.1.self_attn.in_proj_bias 3.457474576862296e-06 6.20691425865516e-05\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.weight 0.0011564042652025819 0.12522251904010773\n",
      "relation_encoders.0.layers.1.self_attn.out_proj.bias -7.503057531721424e-06 7.260731945279986e-05\n",
      "relation_encoders.0.layers.1.linear1.weight 0.00044498126953840256 0.0787818431854248\n",
      "relation_encoders.0.layers.1.linear1.bias 0.004122463520616293 0.07165646553039551\n",
      "relation_encoders.0.layers.1.linear2.weight 0.0004509691207204014 0.07925873249769211\n",
      "relation_encoders.0.layers.1.linear2.bias -0.00038227695040404797 0.03885253891348839\n",
      "relation_encoders.0.layers.1.norm1.weight 0.9999987483024597 7.939734496176243e-05\n",
      "relation_encoders.0.layers.1.norm1.bias -1.296408663620241e-05 7.10506210452877e-05\n",
      "relation_encoders.0.layers.1.norm2.weight 0.9999983906745911 7.877395546529442e-05\n",
      "relation_encoders.0.layers.1.norm2.bias 2.5824197109614033e-06 7.287380140041932e-05\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_weight -0.00030895828967913985 0.08885028958320618\n",
      "relation_encoders.0.layers.2.self_attn.in_proj_bias -2.759528797469102e-06 6.0312515415716916e-05\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.weight -0.0029867184348404408 0.12495889514684677\n",
      "relation_encoders.0.layers.2.self_attn.out_proj.bias -3.403501295906608e-06 7.739238208159804e-05\n",
      "relation_encoders.0.layers.2.linear1.weight 0.0003157741448376328 0.07892979681491852\n",
      "relation_encoders.0.layers.2.linear1.bias 0.0041199177503585815 0.07166264206171036\n",
      "relation_encoders.0.layers.2.linear2.weight -0.0006117218872532248 0.07939351350069046\n",
      "relation_encoders.0.layers.2.linear2.bias -0.00037051201798021793 0.03883415088057518\n",
      "relation_encoders.0.layers.2.norm1.weight 1.0000097751617432 7.866497617214918e-05\n",
      "relation_encoders.0.layers.2.norm1.bias 8.154392162396107e-06 7.89406185504049e-05\n",
      "relation_encoders.0.layers.2.norm2.weight 0.9999690055847168 7.191272743511945e-05\n",
      "relation_encoders.0.layers.2.norm2.bias -2.0222094008204294e-06 4.023710425826721e-05\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_weight -0.000750175619032234 0.0887068584561348\n",
      "relation_encoders.1.layers.0.self_attn.in_proj_bias 4.167488441453315e-06 6.471109372796491e-05\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.weight 0.0009403384756296873 0.12586675584316254\n",
      "relation_encoders.1.layers.0.self_attn.out_proj.bias -5.281000994727947e-06 8.063241693889722e-05\n",
      "relation_encoders.1.layers.0.linear1.weight 0.0005058004753664136 0.07909571379423141\n",
      "relation_encoders.1.layers.0.linear1.bias 0.0016220917459577322 0.07253167778253555\n",
      "relation_encoders.1.layers.0.linear2.weight -0.00018720551452133805 0.07902470976114273\n",
      "relation_encoders.1.layers.0.linear2.bias -0.004553085193037987 0.035645030438899994\n",
      "relation_encoders.1.layers.0.norm1.weight 0.9999982118606567 7.484867091989145e-05\n",
      "relation_encoders.1.layers.0.norm1.bias -5.746328497480135e-06 7.63555581215769e-05\n",
      "relation_encoders.1.layers.0.norm2.weight 1.0000004768371582 7.79705515014939e-05\n",
      "relation_encoders.1.layers.0.norm2.bias -1.3274771845317446e-05 7.229203765746206e-05\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_weight -0.0008828219724819064 0.08809565752744675\n",
      "relation_encoders.1.layers.1.self_attn.in_proj_bias 3.771436922761495e-07 6.283612310653552e-05\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.weight -0.0014937606174498796 0.1250235140323639\n",
      "relation_encoders.1.layers.1.self_attn.out_proj.bias 5.72182034375146e-06 7.604286656714976e-05\n",
      "relation_encoders.1.layers.1.linear1.weight -0.0004750757070723921 0.07912106812000275\n",
      "relation_encoders.1.layers.1.linear1.bias 0.00162741937674582 0.07252318412065506\n",
      "relation_encoders.1.layers.1.linear2.weight -0.0006588716641999781 0.07914428412914276\n",
      "relation_encoders.1.layers.1.linear2.bias -0.0045407055877149105 0.03562973812222481\n",
      "relation_encoders.1.layers.1.norm1.weight 0.9999943971633911 7.561796519439667e-05\n",
      "relation_encoders.1.layers.1.norm1.bias 1.324910954281222e-06 7.547852874267846e-05\n",
      "relation_encoders.1.layers.1.norm2.weight 1.0000027418136597 7.24178462405689e-05\n",
      "relation_encoders.1.layers.1.norm2.bias 3.7021104617451783e-06 7.892114081187174e-05\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_weight -0.0012020694557577372 0.08866927027702332\n",
      "relation_encoders.1.layers.2.self_attn.in_proj_bias 1.3506322602552245e-06 6.342594133457169e-05\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.weight 1.1504162102937698e-06 0.12473099678754807\n",
      "relation_encoders.1.layers.2.self_attn.out_proj.bias -6.5819658630061895e-06 7.152093894546852e-05\n",
      "relation_encoders.1.layers.2.linear1.weight 0.00019372953101992607 0.07856081426143646\n",
      "relation_encoders.1.layers.2.linear1.bias 0.0016202047700062394 0.0725303664803505\n",
      "relation_encoders.1.layers.2.linear2.weight -9.774543286766857e-05 0.07862819731235504\n",
      "relation_encoders.1.layers.2.linear2.bias -0.004540624096989632 0.03563828393816948\n",
      "relation_encoders.1.layers.2.norm1.weight 0.9999966025352478 7.736760744592175e-05\n",
      "relation_encoders.1.layers.2.norm1.bias -1.9866483853547834e-06 8.106900349957868e-05\n",
      "relation_encoders.1.layers.2.norm2.weight 0.9999651908874512 6.96072747814469e-05\n",
      "relation_encoders.1.layers.2.norm2.bias -1.1989088307018392e-05 3.960074172937311e-05\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_weight 0.00027864467119798064 0.08914126455783844\n",
      "relation_encoders.2.layers.0.self_attn.in_proj_bias 2.108167791448068e-06 6.516456778626889e-05\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.weight 0.0023786972742527723 0.1261504590511322\n",
      "relation_encoders.2.layers.0.self_attn.out_proj.bias -6.106869932409609e-06 7.393232954200357e-05\n",
      "relation_encoders.2.layers.0.linear1.weight 0.0004532409366220236 0.07863830775022507\n",
      "relation_encoders.2.layers.0.linear1.bias -0.0024127098731696606 0.07242953032255173\n",
      "relation_encoders.2.layers.0.linear2.weight -0.0009018603013828397 0.07878710329532623\n",
      "relation_encoders.2.layers.0.linear2.bias 0.0017642845632508397 0.037290312349796295\n",
      "relation_encoders.2.layers.0.norm1.weight 1.0000009536743164 7.011373236309737e-05\n",
      "relation_encoders.2.layers.0.norm1.bias -1.2230407264723908e-06 7.441526395268738e-05\n",
      "relation_encoders.2.layers.0.norm2.weight 0.999991238117218 7.84864168963395e-05\n",
      "relation_encoders.2.layers.0.norm2.bias 8.763654477661476e-06 8.191188680939376e-05\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_weight 0.0005080020055174828 0.08868967741727829\n",
      "relation_encoders.2.layers.1.self_attn.in_proj_bias 2.042291271209251e-06 6.18018166278489e-05\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.weight -0.0039997040294110775 0.12366117537021637\n",
      "relation_encoders.2.layers.1.self_attn.out_proj.bias -8.753826250540442e-07 7.777203427394852e-05\n",
      "relation_encoders.2.layers.1.linear1.weight -5.22948830621317e-05 0.0791623443365097\n",
      "relation_encoders.2.layers.1.linear1.bias -0.0024211606942117214 0.07242851704359055\n",
      "relation_encoders.2.layers.1.linear2.weight -3.448418283369392e-06 0.07928265631198883\n",
      "relation_encoders.2.layers.1.linear2.bias 0.0017796455649659038 0.03728951886296272\n",
      "relation_encoders.2.layers.1.norm1.weight 0.9999955892562866 8.106086897896603e-05\n",
      "relation_encoders.2.layers.1.norm1.bias 1.3184102499508299e-05 7.557892240583897e-05\n",
      "relation_encoders.2.layers.1.norm2.weight 0.9999979734420776 8.256349974544719e-05\n",
      "relation_encoders.2.layers.1.norm2.bias -5.1815222832374275e-06 7.860180630814284e-05\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_weight -0.0016668555326759815 0.08936820179224014\n",
      "relation_encoders.2.layers.2.self_attn.in_proj_bias 5.451820470625535e-06 6.220434443093836e-05\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.weight -0.0006111521506682038 0.12484978884458542\n",
      "relation_encoders.2.layers.2.self_attn.out_proj.bias -3.188184564351104e-06 7.629313040524721e-05\n",
      "relation_encoders.2.layers.2.linear1.weight -0.00016113952733576298 0.07914771139621735\n",
      "relation_encoders.2.layers.2.linear1.bias -0.0024188607931137085 0.07242628931999207\n",
      "relation_encoders.2.layers.2.linear2.weight 0.00014546324382536113 0.07916868478059769\n",
      "relation_encoders.2.layers.2.linear2.bias 0.0017655397532507777 0.03729575499892235\n",
      "relation_encoders.2.layers.2.norm1.weight 0.9999955296516418 7.744379399809986e-05\n",
      "relation_encoders.2.layers.2.norm1.bias 4.917140813631704e-06 7.444826769642532e-05\n",
      "relation_encoders.2.layers.2.norm2.weight 0.9999760389328003 7.46964433346875e-05\n",
      "relation_encoders.2.layers.2.norm2.bias -1.8563130197435385e-06 4.32321940024849e-05\n",
      "classifier.0.weight 0.00013326025509741157 0.05322743579745293\n",
      "classifier.0.bias -0.00010216474765911698 0.041867803782224655\n",
      "classifier.3.weight 0.0014577601104974747 0.062292683869600296\n",
      "classifier.3.bias -0.034972481429576874 0.009602691046893597\n",
      "Pretrain Epoch: 001, BSNE Loss: 1.1683\n"
     ]
    }
   ],
   "source": [
    "# Full end-to-end experiment (new version): pretraining + fine-tuning + test.\n",
    "\n",
    "# Hyperparameter settings\n",
    "args = {\n",
    "    \"dataset\": \"amazon\",\n",
    "#     \"dataset\": \"yelp\",\n",
    "    \"batch_size\": 8,\n",
    "    \"sample_size\": 50,\n",
    "    \"weight_decay\": 0.00005,\n",
    "    \"emb_size\": 32,\n",
    "    \"pretrain_epochs\": 200,\n",
    "    \"pretrain_lr\": 0.00005, #0.0001\n",
    "    \"finetune_lr\": 0.0005,\n",
    "    \"num_epochs\": 200,\n",
    "    \"pretrain_patience\": 20,\n",
    "    \"patience\": 30,\n",
    "    \"tsne_weight\": 0.3,\n",
    "    \"weight\": 0.6,\n",
    "    \"layers\": 7,\n",
    "    \"test_size\": 0.6,\n",
    "    \"val_size\": 0.5,\n",
    "    \"layers_tree\": 7,\n",
    "    \"seed\": 76,\n",
    "    \"num_heads\": 2,\n",
    "    \"drop_rate\": 0.5\n",
    "}\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "print(device)\n",
    "\n",
    "# TensorBoard run tagged with dataset name and wall-clock timestamp.\n",
    "timestamp = time.strftime(\"%Y%m%d-%H%M%S\")\n",
    "writer = SummaryWriter(f'runs/{args[\"dataset\"]}_{timestamp}')\n",
    "\n",
    "print('loading data...')\n",
    "prefix = \"../../data/\"\n",
    "\n",
    "# load_data (defined earlier in the notebook) presumably returns per-relation\n",
    "# edge indices, the node feature matrix, and node labels — TODO confirm.\n",
    "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "# Fix random seeds so splits and sampling are reproducible.\n",
    "np.random.seed(args['seed'])\n",
    "rd.seed(args['seed'])\n",
    "\n",
    "if args['dataset'] == 'yelp':\n",
    "    index = list(range(len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                    test_size=args['test_size'], random_state=2,\n",
    "                                                                    shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
    "elif args['dataset'] == 'amazon':\n",
    "    # Nodes 0-3304 are skipped for Amazon — presumably unlabeled; TODO confirm.\n",
    "    index = list(range(3305, len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                    stratify=labels[3305:],\n",
    "                                                                    test_size=args['test_size'],\n",
    "                                                                    random_state=2, shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "\n",
    "# Load the precomputed shortest-path distance matrix.\n",
    "with open(dist_path, 'rb') as f:\n",
    "    dist_data = pickle.load(f)\n",
    "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "# Prepare feature data\n",
    "feat_data = torch.tensor(feat_data).float()\n",
    "# Min-max feature normalization\n",
    "scaler = MinMaxScaler()\n",
    "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
    "\n",
    "# Initialize the model\n",
    "bsne_model = BSNE_Transformer(\n",
    "    in_feat=feat_data.shape[1],\n",
    "    out_feat=2,\n",
    "    relation_nums=len(edge_indexs),\n",
    "    d_model=64,\n",
    "    nhead=args['num_heads'],\n",
    "    num_layers=3,\n",
    "    dim_feedforward=256,\n",
    "    drop_rate=args['drop_rate']\n",
    ").to(device)\n",
    "\n",
    "# Move the edge indices to the target device\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "all_local_losses = []      # per-epoch lists of per-batch local losses\n",
    "all_global_losses = []     # per-epoch lists of per-batch global losses\n",
    "all_bsne_losses = []       # total BSNE loss per epoch\n",
    "epoch_avg_local = []       # mean local loss per epoch\n",
    "epoch_avg_global = []      # mean global loss per epoch\n",
    "    \n",
    "    \n",
    "print(\"\\n=== Starting Pretraining ===\")\n",
    "\n",
    "# Freeze the classifier head: pretraining updates only the encoder parameters.\n",
    "bsne_model.classifier.requires_grad_(False)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    filter(lambda p: p.requires_grad, bsne_model.parameters()),\n",
    "    lr=args['pretrain_lr'],\n",
    "    weight_decay=args['weight_decay']  # was hardcoded 5e-5 == args value; keep configurable\n",
    ")\n",
    "pretrain_best_loss = float('inf')\n",
    "pretrain_no_improve = 0\n",
    "pretrain_early_stop = False\n",
    "\n",
    "for epoch in range(args['pretrain_epochs']):\n",
    "#     if pretrain_early_stop:\n",
    "#         break\n",
    "\n",
    "    bsne_model.train()\n",
    "    optimizer.zero_grad()\n",
    "    \n",
    "    \n",
    "    epoch_local_losses = []\n",
    "    epoch_global_losses = []\n",
    "    \n",
    "    \n",
    "    # batch_centers = rd.sample(range(feat_data.shape[0]), args['batch_size'])\n",
    "    batch_centers = np.random.choice(range(feat_data.shape[0]), size=args['batch_size'], replace=False)\n",
    "\n",
    "\n",
    "    sample_size = args['sample_size']\n",
    "    temperature = 100\n",
    "    eps = 1e-10\n",
    "\n",
    "    total_loss = 0.0\n",
    "    batch_size = len(batch_centers)\n",
    "\n",
    "    # Build a proximity-biased Bp subgraph for every center node\n",
    "    bp_subgraphs = []\n",
    "    for xi in batch_centers:\n",
    "        # Sampling probabilities derived from the shortest-path distances\n",
    "        dist_row = dist_matrix[xi].cpu().numpy()\n",
    "        probs = np.power(10.0, -dist_row)\n",
    "        probs[xi] = 0  # exclude self\n",
    "        probs /= probs.sum()\n",
    "\n",
    "        # Sample neighbor nodes\n",
    "        available_nodes = len(dist_row) - 1\n",
    "        actual_sample_size = min(sample_size, available_nodes)\n",
    "\n",
    "        if actual_sample_size > 0:\n",
    "            neighbors = np.random.choice(len(dist_row), size=actual_sample_size, p=probs, replace=False)\n",
    "            subgraph_nodes = [xi] + neighbors.tolist()\n",
    "        else:\n",
    "            subgraph_nodes = [xi]\n",
    "\n",
    "        bp_subgraphs.append(subgraph_nodes)\n",
    "\n",
    "    # Build a uniformly sampled Bu subgraph for every center node\n",
    "    bu_subgraphs = []\n",
    "    for xi in batch_centers:\n",
    "        dist_row = dist_matrix[xi].cpu().numpy()\n",
    "\n",
    "        # Uniformly sample nodes to form the Bu subgraph\n",
    "        # NOTE(review): unlike the Bp sampling above, xi is not excluded here,\n",
    "        # so the center node can be drawn again — confirm this is intended.\n",
    "        available_nodes = len(dist_row) - 1\n",
    "        actual_sample_size = min(sample_size, available_nodes)\n",
    "\n",
    "        if actual_sample_size > 0:\n",
    "            neighbors = np.random.choice(len(dist_row), size=actual_sample_size, replace=False)\n",
    "            subgraph_nodes = [xi] + neighbors.tolist()\n",
    "        else:\n",
    "            subgraph_nodes = [xi]\n",
    "\n",
    "        bu_subgraphs.append(subgraph_nodes)\n",
    "    # Encode every node of each Bp subgraph\n",
    "    bp_features_batch = []\n",
    "    for bp_nodes in bp_subgraphs:\n",
    "        bp_node_features = []\n",
    "\n",
    "        # Compute a feature vector for each node of the subgraph\n",
    "        for node_idx in bp_nodes:\n",
    "            # Build this node's one-hop neighborhood subgraph\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "\n",
    "            # Get the node's embedding from the model\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "\n",
    "            bp_node_features.append(node_feature.squeeze(0))\n",
    "\n",
    "        # Collect the features of every node in this Bp subgraph\n",
    "        bp_features_batch.append(torch.stack(bp_node_features))  # [sample_size+1, d_model]\n",
    "\n",
    "    # Encode every node of each Bu subgraph\n",
    "    bu_features_batch = []\n",
    "    for bu_nodes in bu_subgraphs:\n",
    "        bu_node_features = []\n",
    "\n",
    "        # Compute a feature vector for each node of the subgraph\n",
    "        for node_idx in bu_nodes:\n",
    "            # Build this node's one-hop neighborhood subgraph\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "\n",
    "            # Get the node's embedding from the model\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "\n",
    "            bu_node_features.append(node_feature.squeeze(0))\n",
    "\n",
    "        # Collect the features of every node in this Bu subgraph\n",
    "        bu_features_batch.append(torch.stack(bu_node_features))  # [sample_size+1, d_model]\n",
    "\n",
    "    # Compute the loss for every (Bp, Bu) subgraph pair\n",
    "    for i in range(batch_size):\n",
    "        # Center node index and feature\n",
    "        center_idx = batch_centers[i]\n",
    "        center_feature = bp_features_batch[i][0]  # the center node is the first entry\n",
    "\n",
    "        # The other nodes of the Bp subgraph (center node excluded)\n",
    "        other_bp_indices = bp_subgraphs[i][1:]\n",
    "        other_bp_features = bp_features_batch[i][1:]\n",
    "        \n",
    "        with torch.no_grad():\n",
    "            # Graph-distance from the center to the other Bp nodes\n",
    "            orig_dists_bp = dist_matrix[center_idx][other_bp_indices].cpu().numpy()\n",
    "\n",
    "            # P vector (based on the path distances)\n",
    "        #     P = (1.0 + orig_dists_bp ** 2) ** -1\n",
    "            P = np.exp(-orig_dists_bp)\n",
    "        #     P = np.power(10.0, -orig_dists_bp)\n",
    "            P_sum = P.sum()\n",
    "            P = P / (P_sum + eps)\n",
    "            P = torch.tensor(P, device=device)\n",
    "\n",
    "        # Q vector (based on the feature distances)\n",
    "    #     Q = (1.0 + feat_dists_bp ** 2 / temperature) ** -1\n",
    "    #     Q = np.exp(-feat_dists_bp.detach().numpy())\n",
    "#         Q = np.exp(-feat_dists_bp.detach().cpu().numpy())\n",
    "        # Feature-space distances from the center to the other Bp nodes\n",
    "        feat_dists_bp = torch.cdist(center_feature.unsqueeze(0), other_bp_features).squeeze(0)\n",
    "        # NOTE(review): feat_dists_bp is a non-leaf tensor produced by cdist;\n",
    "        # this requires_grad_ call looks redundant — confirm it is needed.\n",
    "        feat_dists_bp.requires_grad_(True)\n",
    "#         Q = np.exp(-feat_dists_bp.cpu().numpy())\n",
    "#         Q_sum = Q.sum()\n",
    "#         Q = Q / (Q_sum + eps)\n",
    "#         Q = torch.tensor(Q, device=device)\n",
    "        \n",
    "        Q = torch.softmax(-feat_dists_bp, dim=0)\n",
    "        \n",
    "#         log_ratio = torch.log(P + eps) - torch.log(Q + eps)\n",
    "        log_ratio = torch.log(((P + eps)/(Q + eps))**2)\n",
    "        loss_local = log_ratio.mean() \n",
    "\n",
    "        # Global distances and their unnormalized probabilities\n",
    "        all_dists = dist_matrix[center_idx].cpu().numpy()\n",
    "        unnorm_probs_global = np.exp(-all_dists)\n",
    "        unnorm_probs_global[center_idx] = 0  # exclude self\n",
    "\n",
    "        # Global normalization constant\n",
    "        Z_global = unnorm_probs_global.sum() + eps\n",
    "\n",
    "        # Sum of the global probabilities over the B_p nodes\n",
    "        bp_global_prob_sum = unnorm_probs_global[other_bp_indices].sum() / Z_global\n",
    "\n",
    "        # k_Bp scaling factor\n",
    "        N = dist_matrix.shape[0]\n",
    "        k_Bp = bp_global_prob_sum * (N / len(other_bp_indices))\n",
    "\n",
    "        bu_features = bu_features_batch[i][1:]  # exclude the center node\n",
    "        feat_dists_bu = torch.cdist(center_feature.unsqueeze(0), bu_features).squeeze(0)\n",
    "\n",
    "        sum_e_bu = torch.exp(-feat_dists_bu).sum()\n",
    "        sum_e_bp = torch.exp(-feat_dists_bp).sum()\n",
    "        global_ratio = k_Bp * (sum_e_bu / (sum_e_bp + eps))\n",
    "        loss_global = torch.log(global_ratio.clamp(min=eps, max=1e10)**2)\n",
    "\n",
    "        # = 5. Combine the losses =\n",
    "        total_loss += loss_local + loss_global\n",
    "        \n",
    "        epoch_local_losses.append(loss_local.item())\n",
    "        epoch_global_losses.append(loss_global.item())\n",
    "        \n",
    "        print(f\"local_loss: {loss_local.item()},global_loss: {loss_global.item()}\")\n",
    "\n",
    "    # Mean loss over the batch, computed once. (A duplicate recomputation of\n",
    "    # bsne_loss that previously appeared a few lines below was removed.)\n",
    "    bsne_loss = total_loss / batch_size\n",
    "\n",
    "    # Book-keeping: raw per-batch losses and per-epoch averages.\n",
    "    all_local_losses.append(epoch_local_losses)\n",
    "    all_global_losses.append(epoch_global_losses)\n",
    "\n",
    "    epoch_avg_local.append(np.mean(epoch_local_losses))\n",
    "    epoch_avg_global.append(np.mean(epoch_global_losses))\n",
    "\n",
    "    all_bsne_losses.append(bsne_loss.item())\n",
    "\n",
    "    bsne_loss.backward()\n",
    "    optimizer.step()\n",
    "    \n",
    "    # Debug: print the mean/std of every parameter after the update.\n",
    "    for name, param in bsne_model.named_parameters():\n",
    "        print(name, param.data.mean().item(), param.data.std().item())\n",
    "#     for name, parms in bsne_model.named_parameters():\n",
    "#         print('-->name:', name, '-->grad_requirs:', parms.requires_grad, '--weight', torch.mean(parms.data), ' -->grad_value:', torch.mean(parms.grad))\n",
    "    \n",
    "    # Update the best loss and the early-stopping counter (currently disabled)\n",
    "#     if bsne_loss.item() < pretrain_best_loss:\n",
    "#         pretrain_best_loss = bsne_loss.item()\n",
    "#         pretrain_no_improve = 0\n",
    "#     else:\n",
    "#         pretrain_no_improve += 1\n",
    "\n",
    "#     if pretrain_no_improve >= args['pretrain_patience']:\n",
    "#         print(f\"Pretrain early stopping at epoch {epoch}\")\n",
    "#         pretrain_early_stop = True\n",
    "\n",
    "    writer.add_scalar('Pretrain/TSNE_Loss', bsne_loss.item(), epoch)\n",
    "    print(f'Pretrain Epoch: {epoch:03d}, BSNE Loss: {bsne_loss.item():.4f}')\n",
    "#     if epoch % 5 == 0:\n",
    "#         print(f'Pretrain Epoch: {epoch:03d}, BSNE Loss: {bsne_loss.item():.4f}')\n",
    "\n",
    "\n",
    "plot_losses(all_local_losses, 'Local Loss During Pretraining', 'Loss', 'local_loss.png')\n",
    "plot_losses(all_global_losses, 'Global Loss During Pretraining', 'Loss', 'global_loss.png')\n",
    "\n",
    "# Plot the total BSNE loss\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss.png')\n",
    "plt.close()\n",
    "\n",
    "# Compare average local vs. global losses\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(epoch_avg_local, 'b-', linewidth=2, label='Local Loss (Avg)')\n",
    "plt.plot(epoch_avg_global, 'r-', linewidth=2, label='Global Loss (Avg)')\n",
    "plt.title('Local vs Global Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "plt.savefig('local_vs_global.png')\n",
    "plt.close()\n",
    "\n",
    "print(\"\\nLoss plots saved to local_loss.png, global_loss.png, bsne_loss.png, local_vs_global.png\")\n",
    "\n",
    "print(\"\\n=== Starting Fine-tuning ===\")\n",
    "# Unfreeze the classifier head for supervised fine-tuning.\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=args['weight_decay']  # was hardcoded 5e-5 == args value; keep configurable\n",
    ")\n",
    "# Halve the learning rate when validation AUC plateaus.\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "    total_loss = 0.0\n",
    "\n",
    "    # Sample a mini-batch of center nodes from both classes.\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = []\n",
    "\n",
    "    for xi in batch_centers:\n",
    "        subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "        subgraph_data.append(subgraph)\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # Labels of the sampled center nodes.\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "\n",
    "    # Classification loss (nll_loss expects log-probabilities — TODO confirm\n",
    "    # the model outputs log_softmax).\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    total_loss += cls_loss.item()\n",
    "\n",
    "    avg_loss = total_loss / args['batch_size']\n",
    "    writer.add_scalar('FineTune/Train_Loss', avg_loss, epoch)\n",
    "\n",
    "    # Validate every 5 epochs; keep the checkpoint with the best validation AUC.\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        writer.add_scalar('Validation/AUC', val_auc, epoch)\n",
    "        writer.add_scalar('Validation/F1', val_f1, epoch)\n",
    "        writer.add_scalar('Validation/GMean', val_g_mean, epoch)\n",
    "\n",
    "        print(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            print(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# Restore the best checkpoint. Guard against the corner case where no\n",
    "# validation round ever improved (best_model_state would still be None and\n",
    "# load_state_dict(None) would raise).\n",
    "if best_model_state is not None:\n",
    "    bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# Final evaluation on the held-out test split.\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "print(f'\\n=== Final Test Results ===')\n",
    "print(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n",
    "writer.close()\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "28d3c895",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[1.107200575515279,\n",
       " 1.061684052171675,\n",
       " 1.1939222378576388,\n",
       " 0.9272324137584343,\n",
       " 0.991151823999164,\n",
       " 0.9214618036826305,\n",
       " 0.9657002502952303,\n",
       " 1.2310986091264289,\n",
       " 1.1565664696735918,\n",
       " 0.974084948155445,\n",
       " 0.9471994480099641,\n",
       " 0.9979934395991935,\n",
       " 1.0855401512744671,\n",
       " 0.9780997046351494,\n",
       " 0.9541658706086664,\n",
       " 1.035424557346193,\n",
       " 0.9734147768536383,\n",
       " 1.039191224022804,\n",
       " 0.737816652170451,\n",
       " 1.2446047879151156,\n",
       " 1.0287895924087023,\n",
       " 0.8659443910284015,\n",
       " 1.009443279384725,\n",
       " 1.114789137073348,\n",
       " 0.7646885973115873,\n",
       " 0.9345326595268256,\n",
       " 0.9949457430082145,\n",
       " 0.9365827034306424,\n",
       " 1.0206016789781178,\n",
       " 1.023474277019406,\n",
       " 1.086509359770415,\n",
       " 0.9269521661328066,\n",
       " 0.6431055814125219,\n",
       " 1.128927328120626,\n",
       " 0.9031508466609557,\n",
       " 1.2612834583808885,\n",
       " 1.0648247609062917,\n",
       " 1.0563186111910594,\n",
       " 0.8997309025497133]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_bsne_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "925f11f8",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-plot the total BSNE loss to a temporary file for quick inspection.\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss_tmp.png')\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "c29015c8",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-plot the global pretraining loss to a temporary file.\n",
    "plot_losses(all_global_losses, 'Global Loss During Pretraining', 'Loss', 'global_loss_tmp.png')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "05b41bcb",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "8a4fc62b",
   "metadata": {},
   "outputs": [],
   "source": [
    "def plot_losses(losses, title, ylabel, filename):\n",
    "    \"\"\"Plot per-batch losses, per-epoch averages, and a moving average.\n",
    "\n",
    "    Args:\n",
    "        losses: list of per-epoch lists of batch losses.\n",
    "        title: figure title.\n",
    "        ylabel: y-axis label.\n",
    "        filename: path the figure is written to (figure is saved and closed,\n",
    "            not shown inline).\n",
    "    \"\"\"\n",
    "    plt.figure(figsize=(12, 6))\n",
    "    \n",
    "    # Per-batch losses as light scatter points, one column per epoch\n",
    "    for epoch, batch_losses in enumerate(losses):\n",
    "        plt.scatter([epoch] * len(batch_losses), batch_losses, \n",
    "                   color='lightblue', alpha=0.4, s=10)\n",
    "    \n",
    "    # Per-epoch average loss (dark line)\n",
    "    avg_losses = [np.mean(batch_losses) for batch_losses in losses]\n",
    "    plt.plot(avg_losses, 'b-', linewidth=2, label='Epoch Average')\n",
    "    \n",
    "    # Trailing moving average over exactly `window` epochs.\n",
    "    # Note: the slice starts at i - window + 1 so each average covers at\n",
    "    # most `window` elements; the previous i - window start silently\n",
    "    # averaged window + 1 elements, contradicting the legend label.\n",
    "    window = 5\n",
    "    moving_avg = [np.mean(avg_losses[max(0, i - window + 1):i + 1]) \n",
    "                 for i in range(len(avg_losses))]\n",
    "    plt.plot(moving_avg, 'r--', linewidth=2, label=f'{window}-Epoch Moving Avg')\n",
    "    \n",
    "    plt.title(title)\n",
    "    plt.xlabel('Epoch')\n",
    "    plt.ylabel(ylabel)\n",
    "    plt.legend()\n",
    "    plt.grid(True)\n",
    "    plt.savefig(filename)\n",
    "    plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "0e1139ba",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Save the local and global pretraining loss curves to disk.\n",
    "plot_losses(all_local_losses, 'Local Loss During Pretraining', 'Loss', 'local_loss.png')\n",
    "plot_losses(all_global_losses, 'Global Loss During Pretraining', 'Loss', 'global_loss.png')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "c155328f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Plot the total BSNE loss again to a second file.\n",
    "# NOTE(review): near-duplicate of the earlier bsne_loss_tmp.png cell —\n",
    "# consider a shared helper parameterized by filename.\n",
    "plt.figure(figsize=(12, 6))\n",
    "plt.plot(all_bsne_losses, 'g-', linewidth=2, label='BSNE Loss')\n",
    "plt.title('Total BSNE Loss During Pretraining')\n",
    "plt.xlabel('Epoch')\n",
    "plt.ylabel('Loss')\n",
    "plt.legend()  # without this the label= above is never rendered\n",
    "plt.grid(True)\n",
    "plt.savefig('bsne_loss_2.png')\n",
    "plt.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b7654321",
   "metadata": {},
   "outputs": [],
   "source": [
    "all_local_losses = []      # per-epoch lists of batch local losses\n",
    "all_global_losses = []     # per-epoch lists of batch global losses\n",
    "all_bsne_losses = []       # total BSNE loss per epoch\n",
    "epoch_avg_local = []       # average local loss per epoch\n",
    "epoch_avg_global = []      # average global loss per epoch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "5afcf296",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[0.24077970578109703,\n",
       "  0.5309440543002869,\n",
       "  0.23683794257590876,\n",
       "  0.4337542232678742,\n",
       "  0.28919804016547707,\n",
       "  0.3142543976607615,\n",
       "  0.8370750785540604,\n",
       "  0.482150498641184],\n",
       " [0.35847220216469966,\n",
       "  0.5828394414988239,\n",
       "  0.31661024481740124,\n",
       "  0.27282050139086444,\n",
       "  0.2776667345979521,\n",
       "  0.15064471262112902,\n",
       "  0.37023451344958064,\n",
       "  0.4416000576624181],\n",
       " [0.331299666806569,\n",
       "  0.36379557759751235,\n",
       "  0.3278834264509106,\n",
       "  0.43312059838619804,\n",
       "  0.32136221530075815,\n",
       "  0.4961518455078405,\n",
       "  0.5163381364553391,\n",
       "  0.558980079940234],\n",
       " [0.22573480840503052,\n",
       "  0.2487492501830384,\n",
       "  0.41667603392077857,\n",
       "  0.4637613585251056,\n",
       "  0.21235888023341956,\n",
       "  0.5427531143148364,\n",
       "  0.3614275412545898,\n",
       "  0.24770895933382947],\n",
       " [0.11228344295518922,\n",
       "  0.41713923685461596,\n",
       "  0.3906627700088622,\n",
       "  0.3536980455834383,\n",
       "  0.2373503241580266,\n",
       "  0.3202291602860091,\n",
       "  0.47558235634839735,\n",
       "  0.36197982518048677],\n",
       " [0.3933808141970236,\n",
       "  0.3143788155497787,\n",
       "  0.31010235871602443,\n",
       "  0.28307826862928015,\n",
       "  0.3877842523965544,\n",
       "  0.47605363751120083,\n",
       "  0.395236642820297,\n",
       "  0.5035689219568543],\n",
       " [0.20710167412737757,\n",
       "  0.3384306206263221,\n",
       "  0.2569647214140769,\n",
       "  0.2727229424543655,\n",
       "  0.31372380841340564,\n",
       "  0.4408056662159113,\n",
       "  0.26950642149796655,\n",
       "  0.23230860288509306],\n",
       " [0.613509078534179,\n",
       "  0.43992502843742687,\n",
       "  0.22482483902350744,\n",
       "  0.499227534816833,\n",
       "  0.3594636458198602,\n",
       "  0.27598275685894375,\n",
       "  0.3212553819187652,\n",
       "  0.2699050097148768],\n",
       " [0.33949902845006225,\n",
       "  0.2834578645441001,\n",
       "  0.42204911778143966,\n",
       "  0.24526893898763177,\n",
       "  0.7239597380121081,\n",
       "  0.14728030561087843,\n",
       "  0.15460636012926882,\n",
       "  0.16319120588473499],\n",
       " [0.21274660531939604,\n",
       "  0.3634331971519674,\n",
       "  0.20827086458481447,\n",
       "  0.3093904780414203,\n",
       "  0.10795134910833319,\n",
       "  0.43828953054536285,\n",
       "  0.23510206929456023,\n",
       "  0.2891446056127548],\n",
       " [0.4753040691633775,\n",
       "  0.4831882619973511,\n",
       "  0.13107668768583094,\n",
       "  0.22543309996961938,\n",
       "  0.23123617117884088,\n",
       "  0.4363421149229563,\n",
       "  0.1274046626012548,\n",
       "  0.30041515172760397],\n",
       " [0.34857267728161573,\n",
       "  0.29750104183104303,\n",
       "  0.5267058119641014,\n",
       "  0.2201397416022505,\n",
       "  0.27139109256595634,\n",
       "  0.5767770926737985,\n",
       "  0.44185984168277775,\n",
       "  0.46730320177240287],\n",
       " [0.27701568951414574,\n",
       "  0.19578653543180927,\n",
       "  0.2872095752340156,\n",
       "  0.39817284059887414,\n",
       "  0.2953602580305122,\n",
       "  0.33609386086591825,\n",
       "  0.4533187054702519,\n",
       "  0.2717377773091972],\n",
       " [0.4527913193736127,\n",
       "  0.17159865246013084,\n",
       "  0.23638812223818936,\n",
       "  0.30858194865649563,\n",
       "  0.2842371431371817,\n",
       "  0.26338306542705475,\n",
       "  0.3733947589851772,\n",
       "  0.12501077436907454],\n",
       " [0.5482592333995506,\n",
       "  0.15909624142289494,\n",
       "  0.414348889004525,\n",
       "  0.3603451177154438,\n",
       "  0.4250855487133012,\n",
       "  0.38925320054508256,\n",
       "  0.40821151506547937,\n",
       "  0.23163164605459827],\n",
       " [0.2812480704376305,\n",
       "  0.4602745607167693,\n",
       "  0.3369765064495854,\n",
       "  0.20484331691091684,\n",
       "  0.30139564570043414,\n",
       "  0.2914245401496037,\n",
       "  0.368851683396254,\n",
       "  0.21542035755969743],\n",
       " [0.39918701318474853,\n",
       "  0.2411363238259811,\n",
       "  0.3239793296250654,\n",
       "  0.035815637090645655,\n",
       "  0.32949888559643353,\n",
       "  0.6535441140412672,\n",
       "  0.15726167841128494,\n",
       "  0.1740841525382951],\n",
       " [0.4137768978618154,\n",
       "  0.4505459548992032,\n",
       "  0.40478894141963623,\n",
       "  0.2219837275788654,\n",
       "  0.31095118951211925,\n",
       "  0.1412136878219761,\n",
       "  0.6411581423976054,\n",
       "  0.19081708436468936],\n",
       " [0.27309253581814974,\n",
       "  0.35315747176260087,\n",
       "  0.3531599729312734,\n",
       "  0.5993228031250797,\n",
       "  0.468535632384666,\n",
       "  0.310063350797305,\n",
       "  0.24926417234783654,\n",
       "  0.4400267124925902],\n",
       " [0.21156418333300447,\n",
       "  0.33838921145772055,\n",
       "  0.2078155006316447,\n",
       "  0.2649798913240993,\n",
       "  0.32519020900565765,\n",
       "  0.29815201054026136,\n",
       "  0.45913885272504396,\n",
       "  0.3947284708919326],\n",
       " [0.26728259094501944,\n",
       "  0.25239415643362384,\n",
       "  0.31196251971580624,\n",
       "  0.5311987237098863,\n",
       "  0.23044120787049302,\n",
       "  0.37037985076929064,\n",
       "  0.4319287051481948,\n",
       "  0.23988768323611218],\n",
       " [0.24180570119059316,\n",
       "  0.4108535954161221,\n",
       "  0.5057065627014953,\n",
       "  0.36705705888668433,\n",
       "  0.2284288906106975,\n",
       "  0.4959338044161394,\n",
       "  0.15628876581497064,\n",
       "  0.7699292849904898],\n",
       " [0.29686290724360936,\n",
       "  0.24737634031643752,\n",
       "  0.320410016256413,\n",
       "  0.3073016558182863,\n",
       "  0.2925938324722666,\n",
       "  0.43471896889481415,\n",
       "  0.2836510815171295,\n",
       "  0.41243944182569636],\n",
       " [0.5264523752068283,\n",
       "  0.24743811114933248,\n",
       "  0.471695111927644,\n",
       "  0.33070987021226905,\n",
       "  0.1443102096252315,\n",
       "  0.33637537855750455,\n",
       "  0.27152183353771947,\n",
       "  0.19383866973390107],\n",
       " [0.31438565347775393,\n",
       "  0.4332224441345456,\n",
       "  0.4706727455561247,\n",
       "  0.6245311685456076,\n",
       "  0.24008864707209823,\n",
       "  0.36209606567582564,\n",
       "  0.19439170612923143,\n",
       "  0.28023774032227944],\n",
       " [0.3299395414037964,\n",
       "  0.1860574742540427,\n",
       "  0.40020592499853996,\n",
       "  0.2743661765515757,\n",
       "  0.3079225581289539,\n",
       "  0.34752751805479193,\n",
       "  0.3924899679840168,\n",
       "  0.15575278312616422],\n",
       " [0.3494813199761446,\n",
       "  0.3582413341615199,\n",
       "  0.3685433107936547,\n",
       "  0.31394082311795823,\n",
       "  0.2134187894293001,\n",
       "  0.4183666591530075,\n",
       "  0.543967649037663,\n",
       "  0.2731651282683695],\n",
       " [0.32827616522932174,\n",
       "  0.3185570273190643,\n",
       "  0.47154740697269076,\n",
       "  0.505421817991989,\n",
       "  0.3396993715628811,\n",
       "  0.32097981063166814,\n",
       "  0.26794652339364816,\n",
       "  0.20008574979389568],\n",
       " [0.15861461683339986,\n",
       "  0.22315138981945828,\n",
       "  0.2853798642566086,\n",
       "  0.22193341073978204,\n",
       "  0.2881487149131238,\n",
       "  0.08397689715674979,\n",
       "  0.2332827417218077,\n",
       "  0.33325011588970455],\n",
       " [0.34484462102610935,\n",
       "  0.26540768846527163,\n",
       "  0.6201435204392114,\n",
       "  0.2814745041833658,\n",
       "  0.3964680867459225,\n",
       "  0.18887905335941504,\n",
       "  0.18334050374624702,\n",
       "  0.24400838203049674],\n",
       " [0.32687131268020997,\n",
       "  0.41948223332262763,\n",
       "  0.35436189466871726,\n",
       "  0.24583755386088244,\n",
       "  0.42753338562996307,\n",
       "  0.35946383909230223,\n",
       "  0.36406186838962046,\n",
       "  0.24350751879811128],\n",
       " [0.48402365714453627,\n",
       "  0.27915225356096,\n",
       "  0.31220663046995983,\n",
       "  0.19228362307041486,\n",
       "  0.3446888997349242,\n",
       "  0.25581531247560313,\n",
       "  0.462573057220069,\n",
       "  0.3326529071123656],\n",
       " [0.5940821350639514,\n",
       "  0.5407762330441176,\n",
       "  0.35267981329864756,\n",
       "  0.3230832608761543,\n",
       "  0.23863436638909719,\n",
       "  0.19743321727273547,\n",
       "  0.43882314834981606,\n",
       "  0.5510664244019348],\n",
       " [0.35037507313182453,\n",
       "  0.26199472722231076,\n",
       "  0.3165002157495674,\n",
       "  0.55653644326937,\n",
       "  0.3798979587479481,\n",
       "  0.2548304219054064,\n",
       "  0.29325081016129645,\n",
       "  0.4832320680185281],\n",
       " [0.29759216753090045,\n",
       "  0.5209152782603155,\n",
       "  0.4026615535684471,\n",
       "  0.3469895880333425,\n",
       "  0.39609040965389514,\n",
       "  0.3770869676666916,\n",
       "  0.3423372025519049,\n",
       "  0.3334015774424369],\n",
       " [0.45288256696574913,\n",
       "  0.23793014956705602,\n",
       "  0.21706045586151457,\n",
       "  0.2819629666770102,\n",
       "  0.5385590208842157,\n",
       "  0.21070592149252007,\n",
       "  0.2600828323581904,\n",
       "  0.154783225191383],\n",
       " [0.521847980383236,\n",
       "  0.19290248077512995,\n",
       "  0.12734656456485133,\n",
       "  0.19275325230594795,\n",
       "  0.20721078108906882,\n",
       "  0.22684142014083097,\n",
       "  0.3182793703071937,\n",
       "  0.12838580939370903],\n",
       " [0.4124948832560794,\n",
       "  0.3764974819184008,\n",
       "  0.19107719599699788,\n",
       "  0.42123217489465403,\n",
       "  0.3416848645143486,\n",
       "  0.5130169447896535,\n",
       "  0.2009370468045921,\n",
       "  0.3161004200038955],\n",
       " [0.2720391949378054,\n",
       "  0.2313208121331535,\n",
       "  0.2663109749287238,\n",
       "  0.3833245370831804,\n",
       "  0.14094882538740833,\n",
       "  0.27524984390233365,\n",
       "  0.5618672226674754,\n",
       "  0.2595374684380139]]"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_local_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "6b5f8d4c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[[-0.12273535877466202,\n",
       "  0.4293830990791321,\n",
       "  0.823665976524353,\n",
       "  1.4003627300262451,\n",
       "  1.3568060398101807,\n",
       "  1.4502134323120117,\n",
       "  0.8161609768867493,\n",
       "  1.3646652698516846],\n",
       " [0.19095304608345032,\n",
       "  1.085836410522461,\n",
       "  0.9662845134735107,\n",
       "  0.9494696855545044,\n",
       "  0.7412376999855042,\n",
       "  -0.06231619790196419,\n",
       "  0.18970836699008942,\n",
       "  1.2337946891784668],\n",
       " [0.08880627900362015,\n",
       "  -0.2805607318878174,\n",
       "  1.4175548553466797,\n",
       "  1.0951932668685913,\n",
       "  0.5676320195198059,\n",
       "  0.8583052158355713,\n",
       "  0.27465108036994934,\n",
       "  0.11610040068626404],\n",
       " [1.146021842956543,\n",
       "  0.08929156512022018,\n",
       "  0.23736584186553955,\n",
       "  1.3064849376678467,\n",
       "  0.5534606575965881,\n",
       "  0.8520834445953369,\n",
       "  0.6394424438476562,\n",
       "  1.0652333498001099],\n",
       " [0.5067155957221985,\n",
       "  0.9345248937606812,\n",
       "  0.9495549201965332,\n",
       "  0.8420770764350891,\n",
       "  1.279174566268921,\n",
       "  1.210945725440979,\n",
       "  1.0312414169311523,\n",
       "  0.08105404675006866],\n",
       " [0.0018882793374359608,\n",
       "  0.9296644926071167,\n",
       "  1.0364984273910522,\n",
       "  0.4903882145881653,\n",
       "  0.32097095251083374,\n",
       "  1.2088464498519897,\n",
       "  1.186440348625183,\n",
       "  0.1896434873342514],\n",
       " [0.7417868375778198,\n",
       "  1.3401920795440674,\n",
       "  1.3546912670135498,\n",
       "  0.4262484014034271,\n",
       "  0.023525984957814217,\n",
       "  -0.49147939682006836,\n",
       "  0.3576827347278595,\n",
       "  0.9867426753044128],\n",
       " [1.1499103307724,\n",
       "  1.0583412647247314,\n",
       "  0.036260828375816345,\n",
       "  0.5027261972427368,\n",
       "  0.2508220672607422,\n",
       "  0.4437981843948364,\n",
       "  1.3193049430847168,\n",
       "  1.2874784469604492],\n",
       " [-0.21574269235134125,\n",
       "  1.2587610483169556,\n",
       "  0.35888543725013733,\n",
       "  0.043636783957481384,\n",
       "  0.6596008539199829,\n",
       "  0.6751604676246643,\n",
       "  0.7701174020767212,\n",
       "  0.012348154559731483],\n",
       " [0.9688050150871277,\n",
       "  -0.5840412378311157,\n",
       "  0.603628396987915,\n",
       "  0.12677809596061707,\n",
       "  1.2660385370254517,\n",
       "  1.341729998588562,\n",
       "  0.3115597367286682,\n",
       "  1.274918556213379],\n",
       " [1.6444662809371948,\n",
       "  0.21814221143722534,\n",
       "  1.2568672895431519,\n",
       "  0.42314767837524414,\n",
       "  0.9070594310760498,\n",
       "  -0.2976374328136444,\n",
       "  0.4579984247684479,\n",
       "  1.0580129623413086],\n",
       " [0.47318726778030396,\n",
       "  0.3724735975265503,\n",
       "  0.814996063709259,\n",
       "  0.7860387563705444,\n",
       "  -0.380690336227417,\n",
       "  0.48686689138412476,\n",
       "  0.4760197401046753,\n",
       "  0.3814287781715393],\n",
       " [0.20718543231487274,\n",
       "  0.6377472281455994,\n",
       "  0.6881259083747864,\n",
       "  1.32895028591156,\n",
       "  0.8997262120246887,\n",
       "  1.022632122039795,\n",
       "  0.8845871090888977,\n",
       "  -0.11630374193191528],\n",
       " [0.31421974301338196,\n",
       "  -0.9391127228736877,\n",
       "  0.09360677003860474,\n",
       "  0.6385204195976257,\n",
       "  1.0772550106048584,\n",
       "  0.9448704719543457,\n",
       "  1.1468104124069214,\n",
       "  -0.09299150854349136],\n",
       " [0.6131280660629272,\n",
       "  0.18288639187812805,\n",
       "  -0.5135511159896851,\n",
       "  0.8015597462654114,\n",
       "  0.8262664079666138,\n",
       "  0.6185048818588257,\n",
       "  0.163734570145607,\n",
       "  0.08700539916753769],\n",
       " [0.0963938757777214,\n",
       "  1.155400037765503,\n",
       "  0.19505204260349274,\n",
       "  0.7739320993423462,\n",
       "  1.2842259407043457,\n",
       "  0.26025062799453735,\n",
       "  0.5138598680496216,\n",
       "  0.33968180418014526],\n",
       " [-0.044324763119220734,\n",
       "  0.5439729690551758,\n",
       "  1.201080322265625,\n",
       "  0.759075403213501,\n",
       "  -0.09308825433254242,\n",
       "  0.06321369111537933,\n",
       "  0.1273428201675415,\n",
       "  0.29194745421409607],\n",
       " [0.9288972020149231,\n",
       "  0.7744485139846802,\n",
       "  0.46374303102493286,\n",
       "  -0.14436770975589752,\n",
       "  0.5389517545700073,\n",
       "  0.3376571238040924,\n",
       "  0.011137460358440876,\n",
       "  1.0725018978118896],\n",
       " [1.8619627952575684,\n",
       "  0.8220572471618652,\n",
       "  0.7899228930473328,\n",
       "  0.131935715675354,\n",
       "  0.5495374798774719,\n",
       "  0.27542614936828613,\n",
       "  -0.20990456640720367,\n",
       "  0.2087576687335968],\n",
       " [1.0276451110839844,\n",
       "  1.6946643590927124,\n",
       "  -0.3888336420059204,\n",
       "  0.39555269479751587,\n",
       "  -0.1724851429462433,\n",
       "  1.8001785278320312,\n",
       "  1.2338197231292725,\n",
       "  0.27554452419281006],\n",
       " [-0.1710849106311798,\n",
       "  1.1599717140197754,\n",
       "  0.9712091088294983,\n",
       "  -0.502377450466156,\n",
       "  1.031782627105713,\n",
       "  0.457805871963501,\n",
       "  0.8003222942352295,\n",
       "  -0.010724145919084549],\n",
       " [0.47419267892837524,\n",
       "  0.8790967464447021,\n",
       "  0.011865630745887756,\n",
       "  0.3689364790916443,\n",
       "  1.1782057285308838,\n",
       "  0.09413128346204758,\n",
       "  -0.34821099042892456,\n",
       "  1.5008718967437744],\n",
       " [0.17016997933387756,\n",
       "  1.4675791263580322,\n",
       "  0.3571184575557709,\n",
       "  1.7289690971374512,\n",
       "  -0.16811668872833252,\n",
       "  0.3795774579048157,\n",
       "  1.246758222579956,\n",
       "  0.4002976417541504],\n",
       " [0.4303833246231079,\n",
       "  0.03993453085422516,\n",
       "  0.15448635816574097,\n",
       "  0.34564489126205444,\n",
       "  -0.1834205985069275,\n",
       "  1.5958338975906372,\n",
       "  0.3791021704673767,\n",
       "  0.2059432417154312],\n",
       " [-0.18028225004673004,\n",
       "  0.5707733035087585,\n",
       "  0.7852591872215271,\n",
       "  -0.29141512513160706,\n",
       "  0.829521894454956,\n",
       "  -0.0977068692445755,\n",
       "  0.8416354060173035,\n",
       "  1.382007360458374],\n",
       " [1.6502243280410767,\n",
       "  0.6876291036605835,\n",
       "  1.1310594081878662,\n",
       "  0.43456536531448364,\n",
       "  -0.7314342260360718,\n",
       "  0.9112824201583862,\n",
       "  0.643757164478302,\n",
       "  1.7870371341705322],\n",
       " [-0.009965799748897552,\n",
       "  0.5384424924850464,\n",
       "  0.3044642508029938,\n",
       "  -1.0002415180206299,\n",
       "  1.6627384424209595,\n",
       "  0.6525346636772156,\n",
       "  -0.22996525466442108,\n",
       "  0.09897933155298233],\n",
       " [0.621403694152832,\n",
       "  0.5967896580696106,\n",
       "  1.3986972570419312,\n",
       "  1.6381036043167114,\n",
       "  1.0785936117172241,\n",
       "  1.4753040075302124,\n",
       "  1.8028101921081543,\n",
       "  0.7461230754852295],\n",
       " [1.0099613666534424,\n",
       "  0.554298996925354,\n",
       "  1.0358808040618896,\n",
       "  -0.04851757735013962,\n",
       "  1.1820857524871826,\n",
       "  1.085620641708374,\n",
       "  0.4500579833984375,\n",
       "  0.6558305025100708],\n",
       " [0.7692205309867859,\n",
       "  0.9245419502258301,\n",
       "  0.19559352099895477,\n",
       "  1.5708156824111938,\n",
       "  0.9757193326950073,\n",
       "  -0.3849753439426422,\n",
       "  0.19606082141399384,\n",
       "  1.7395235300064087],\n",
       " [0.19273358583450317,\n",
       "  0.11489081382751465,\n",
       "  0.1779593676328659,\n",
       "  1.0263075828552246,\n",
       "  1.0018764734268188,\n",
       "  1.8777252435684204,\n",
       "  1.463614583015442,\n",
       "  -0.1974657028913498],\n",
       " [0.8736598491668701,\n",
       "  1.522114634513855,\n",
       "  0.5075078010559082,\n",
       "  0.7025623321533203,\n",
       "  0.25490957498550415,\n",
       "  0.3465438187122345,\n",
       "  -0.06286951899528503,\n",
       "  0.5118458271026611],\n",
       " [-0.6517409682273865,\n",
       "  0.39915913343429565,\n",
       "  -0.4678086042404175,\n",
       "  0.8883904814720154,\n",
       "  2.4035801887512207,\n",
       "  0.5390931367874146,\n",
       "  1.6625680923461914,\n",
       "  0.9975532293319702],\n",
       " [0.17691586911678314,\n",
       "  -0.3883551359176636,\n",
       "  2.153653144836426,\n",
       "  0.709580659866333,\n",
       "  1.1675981283187866,\n",
       "  1.767632007598877,\n",
       "  0.18795245885849,\n",
       "  -0.3351629674434662],\n",
       " [2.8169031143188477,\n",
       "  -0.5442997813224792,\n",
       "  0.10455396771430969,\n",
       "  -0.41172757744789124,\n",
       "  0.5558698177337646,\n",
       "  0.12082307040691376,\n",
       "  2.0866010189056396,\n",
       "  0.8465304374694824],\n",
       " [0.8605324029922485,\n",
       "  -0.20478951930999756,\n",
       "  0.5333120226860046,\n",
       "  -0.19895248115062714,\n",
       "  1.2670618295669556,\n",
       "  1.560657262802124,\n",
       "  0.8401447534561157,\n",
       "  2.2661631107330322],\n",
       " [1.2229810953140259,\n",
       "  1.1150610446929932,\n",
       "  0.9145590662956238,\n",
       "  -0.1727176159620285,\n",
       "  1.2289706468582153,\n",
       "  1.1284265518188477,\n",
       "  0.09285375475883484,\n",
       "  0.9030546545982361],\n",
       " [0.8467965126037598,\n",
       "  0.5670166015625,\n",
       "  0.4259852170944214,\n",
       "  0.178435280919075,\n",
       "  0.22044330835342407,\n",
       "  1.0877060890197754,\n",
       "  0.024525798857212067,\n",
       "  -0.03997735306620598],\n",
       " [1.335908055305481,\n",
       "  1.631274700164795,\n",
       "  1.1369702816009521,\n",
       "  1.5947016477584839,\n",
       "  0.49912789463996887,\n",
       "  -0.028591182082891464,\n",
       "  1.3367502689361572,\n",
       "  -0.5721298456192017],\n",
       " [1.727461814880371,\n",
       "  0.7466651797294617,\n",
       "  0.33674049377441406,\n",
       "  1.2122725248336792,\n",
       "  0.6484026908874512,\n",
       "  1.008802056312561,\n",
       "  0.625784158706665,\n",
       "  0.33521130681037903],\n",
       " [0.6457628607749939,\n",
       "  -0.8702088594436646,\n",
       "  1.286202311515808,\n",
       "  0.9541146755218506,\n",
       "  -0.42700281739234924,\n",
       "  0.8587853312492371,\n",
       "  0.4369887113571167,\n",
       "  1.1375935077667236],\n",
       " [0.6594102382659912,\n",
       "  0.4240157902240753,\n",
       "  1.1186976432800293,\n",
       "  -0.03125776723027229,\n",
       "  0.3807719647884369,\n",
       "  0.8530459403991699,\n",
       "  1.3497250080108643,\n",
       "  0.3045770525932312],\n",
       " [1.0847193002700806,\n",
       "  0.45566242933273315,\n",
       "  -0.015679514035582542,\n",
       "  1.620813012123108,\n",
       "  1.6149189472198486,\n",
       "  0.494651734828949,\n",
       "  0.6872051954269409,\n",
       "  0.6699979901313782],\n",
       " [1.466213583946228,\n",
       "  0.349964439868927,\n",
       "  0.9522100687026978,\n",
       "  0.3921002447605133,\n",
       "  0.8686020374298096,\n",
       "  0.48416244983673096,\n",
       "  0.7669258117675781,\n",
       "  0.5969817638397217],\n",
       " [-0.05682779476046562,\n",
       "  1.329907774925232,\n",
       "  1.740923523902893,\n",
       "  1.3203623294830322,\n",
       "  0.7283671498298645,\n",
       "  -0.43986573815345764,\n",
       "  1.568872094154358,\n",
       "  0.5712873935699463],\n",
       " [-0.4001061022281647,\n",
       "  1.2929438352584839,\n",
       "  0.9078203439712524,\n",
       "  2.008751392364502,\n",
       "  1.0284215211868286,\n",
       "  0.7485091686248779,\n",
       "  1.9181348085403442,\n",
       "  0.7085044980049133],\n",
       " [0.5234508514404297,\n",
       "  0.9648851156234741,\n",
       "  -0.014580228365957737,\n",
       "  -0.14129821956157684,\n",
       "  0.5580005645751953,\n",
       "  0.6662064790725708,\n",
       "  -0.3447763919830322,\n",
       "  0.5539442300796509],\n",
       " [0.5868099927902222,\n",
       "  1.1373411417007446,\n",
       "  0.9787418842315674,\n",
       "  0.9974232912063599,\n",
       "  0.8965637683868408,\n",
       "  0.6489600539207458,\n",
       "  -0.2312498241662979,\n",
       "  0.5547488927841187],\n",
       " [0.8967232704162598,\n",
       "  1.1268131732940674,\n",
       "  -0.2429850697517395,\n",
       "  0.11191015690565109,\n",
       "  0.33183661103248596,\n",
       "  1.5091993808746338,\n",
       "  -0.5315638780593872,\n",
       "  0.7812240123748779],\n",
       " [0.22916576266288757,\n",
       "  -0.1184636652469635,\n",
       "  0.7850866317749023,\n",
       "  0.6542977690696716,\n",
       "  0.7238544225692749,\n",
       "  -0.3422430455684662,\n",
       "  1.1523951292037964,\n",
       "  1.8055365085601807],\n",
       " [0.8869059085845947,\n",
       "  0.6778165102005005,\n",
       "  0.20076753199100494,\n",
       "  0.8699551820755005,\n",
       "  1.0303726196289062,\n",
       "  0.8645713329315186,\n",
       "  0.6738618016242981,\n",
       "  0.9608979225158691],\n",
       " [1.0404930114746094,\n",
       "  0.6775041222572327,\n",
       "  0.01664348691701889,\n",
       "  0.738246500492096,\n",
       "  -0.6736207604408264,\n",
       "  -0.1557050198316574,\n",
       "  1.106398105621338,\n",
       "  0.11725912243127823],\n",
       " [1.148938775062561,\n",
       "  0.5770850777626038,\n",
       "  1.9515067338943481,\n",
       "  0.03741566464304924,\n",
       "  0.418009877204895,\n",
       "  0.7584006190299988,\n",
       "  0.07960788160562515,\n",
       "  -0.72380530834198],\n",
       " [0.9678298830986023,\n",
       "  0.9638010263442993,\n",
       "  0.7628788948059082,\n",
       "  0.09845443814992905,\n",
       "  0.8171525001525879,\n",
       "  0.13284318149089813,\n",
       "  -0.0912579894065857,\n",
       "  0.2398718297481537],\n",
       " [0.8475292325019836,\n",
       "  1.0412299633026123,\n",
       "  0.7081903219223022,\n",
       "  -0.3048730492591858,\n",
       "  0.8129615187644958,\n",
       "  1.3567229509353638,\n",
       "  1.3846336603164673,\n",
       "  1.1298003196716309],\n",
       " [0.7563071250915527,\n",
       "  0.9692034721374512,\n",
       "  0.5080716013908386,\n",
       "  -0.4015829861164093,\n",
       "  0.8556910753250122,\n",
       "  1.1129586696624756,\n",
       "  1.162887692451477,\n",
       "  0.2965630292892456],\n",
       " [-0.23450809717178345,\n",
       "  0.1097637340426445,\n",
       "  1.3509008884429932,\n",
       "  0.8699640035629272,\n",
       "  1.3264583349227905,\n",
       "  0.8755963444709778,\n",
       "  0.42748528718948364,\n",
       "  0.26399633288383484],\n",
       " [-0.06503137201070786,\n",
       "  0.3414119482040405,\n",
       "  -0.3284122943878174,\n",
       "  0.588634729385376,\n",
       "  -0.08956240862607956,\n",
       "  1.2712421417236328,\n",
       "  0.7651824951171875,\n",
       "  -0.2779000699520111],\n",
       " [1.5660548210144043,\n",
       "  -0.1147783100605011,\n",
       "  0.611056923866272,\n",
       "  1.2002747058868408,\n",
       "  0.10782119631767273,\n",
       "  0.5114282369613647,\n",
       "  -0.03386865556240082,\n",
       "  1.610475778579712],\n",
       " [-0.38014188408851624,\n",
       "  0.3995837867259979,\n",
       "  -0.9168930649757385,\n",
       "  -0.245725616812706,\n",
       "  0.15266118943691254,\n",
       "  1.4193698167800903,\n",
       "  1.2011315822601318,\n",
       "  0.887327253818512],\n",
       " [0.8882392644882202,\n",
       "  -0.3124019503593445,\n",
       "  0.17429600656032562,\n",
       "  1.1203545331954956,\n",
       "  0.9864097833633423,\n",
       "  0.8096059560775757,\n",
       "  0.6562900543212891,\n",
       "  -0.056884199380874634],\n",
       " [1.0333786010742188,\n",
       "  1.5565909147262573,\n",
       "  0.7352113127708435,\n",
       "  0.5369771718978882,\n",
       "  1.5689046382904053,\n",
       "  -0.21768377721309662,\n",
       "  0.6518954634666443,\n",
       "  0.7909386157989502],\n",
       " [0.6819406151771545,\n",
       "  0.2596179246902466,\n",
       "  1.395141839981079,\n",
       "  0.44088810682296753,\n",
       "  0.5442743301391602,\n",
       "  0.17360247671604156,\n",
       "  1.2278478145599365,\n",
       "  0.9260964393615723],\n",
       " [0.6046690940856934,\n",
       "  1.8418974876403809,\n",
       "  1.033512830734253,\n",
       "  1.3375694751739502,\n",
       "  -0.7930704355239868,\n",
       "  1.3605326414108276,\n",
       "  0.30950579047203064,\n",
       "  1.6799215078353882],\n",
       " [1.0422734022140503,\n",
       "  0.23285263776779175,\n",
       "  0.44873595237731934,\n",
       "  -0.36531302332878113,\n",
       "  1.3732285499572754,\n",
       "  0.684841513633728,\n",
       "  0.2920619249343872,\n",
       "  0.13165713846683502],\n",
       " [0.20678088068962097,\n",
       "  0.21223777532577515,\n",
       "  1.0485395193099976,\n",
       "  0.2118672877550125,\n",
       "  1.5578877925872803,\n",
       "  1.033029556274414,\n",
       "  -0.3561978042125702,\n",
       "  1.048647403717041],\n",
       " [1.0688258409500122,\n",
       "  0.2797010838985443,\n",
       "  0.7113785147666931,\n",
       "  1.0736552476882935,\n",
       "  0.7215460538864136,\n",
       "  0.5270025730133057,\n",
       "  0.6452867388725281,\n",
       "  0.4923930764198303],\n",
       " [0.2631579339504242,\n",
       "  -0.1595335304737091,\n",
       "  2.018009662628174,\n",
       "  0.5045877695083618,\n",
       "  0.9584704637527466,\n",
       "  0.7815213203430176,\n",
       "  0.8296893239021301,\n",
       "  0.11111757159233093],\n",
       " [0.7410820126533508,\n",
       "  0.7739843130111694,\n",
       "  0.7596141695976257,\n",
       "  0.8363016247749329,\n",
       "  0.7554786801338196,\n",
       "  1.5400041341781616,\n",
       "  0.5441151857376099,\n",
       "  0.8477463126182556],\n",
       " [0.7982027530670166,\n",
       "  0.759619414806366,\n",
       "  0.3697192370891571,\n",
       "  1.2497334480285645,\n",
       "  0.15120016038417816,\n",
       "  -0.6102544069290161,\n",
       "  0.33998918533325195,\n",
       "  1.1474957466125488],\n",
       " [1.3214415311813354,\n",
       "  0.9719683527946472,\n",
       "  1.158975601196289,\n",
       "  1.5396783351898193,\n",
       "  1.1528315544128418,\n",
       "  1.0233190059661865,\n",
       "  0.33829182386398315,\n",
       "  0.14428164064884186],\n",
       " [0.6855496764183044,\n",
       "  0.8581211566925049,\n",
       "  0.9151592254638672,\n",
       "  1.5470203161239624,\n",
       "  0.5008871555328369,\n",
       "  1.2970519065856934,\n",
       "  0.21078725159168243,\n",
       "  0.07237814366817474],\n",
       " [-0.4095434546470642,\n",
       "  1.2031100988388062,\n",
       "  0.6579062938690186,\n",
       "  0.5500364303588867,\n",
       "  1.3368297815322876,\n",
       "  0.5007888674736023,\n",
       "  0.1778034120798111,\n",
       "  0.4480767250061035],\n",
       " [0.8784123063087463,\n",
       "  0.19292566180229187,\n",
       "  0.37475651502609253,\n",
       "  0.9341398477554321,\n",
       "  0.18644528090953827,\n",
       "  1.6090587377548218,\n",
       "  0.552269458770752,\n",
       "  0.7758820652961731],\n",
       " [0.23938561975955963,\n",
       "  1.1857229471206665,\n",
       "  0.352092444896698,\n",
       "  0.9630482196807861,\n",
       "  0.8108285069465637,\n",
       "  0.8178543448448181,\n",
       "  0.18648366630077362,\n",
       "  0.5783163905143738],\n",
       " [0.5373773574829102,\n",
       "  0.7675755023956299,\n",
       "  1.77919602394104,\n",
       "  0.6773523688316345,\n",
       "  0.5908665060997009,\n",
       "  0.7050622701644897,\n",
       "  1.8510714769363403,\n",
       "  0.6173234581947327],\n",
       " [0.8384829759597778,\n",
       "  0.8069192171096802,\n",
       "  0.602565348148346,\n",
       "  0.7172342538833618,\n",
       "  0.729820191860199,\n",
       "  1.1484909057617188,\n",
       "  1.2924789190292358,\n",
       "  1.4315375089645386],\n",
       " [1.1782653331756592,\n",
       "  0.8054063320159912,\n",
       "  0.31589001417160034,\n",
       "  1.7839341163635254,\n",
       "  -0.7172083854675293,\n",
       "  0.7137279510498047,\n",
       "  0.2682870328426361,\n",
       "  0.2803722620010376],\n",
       " [0.7248608469963074,\n",
       "  -0.26130107045173645,\n",
       "  2.0461912155151367,\n",
       "  0.7224082350730896,\n",
       "  0.7410879135131836,\n",
       "  0.14208215475082397,\n",
       "  0.8777621388435364,\n",
       "  -0.9385627508163452],\n",
       " [-0.30632659792900085,\n",
       "  0.9808575510978699,\n",
       "  0.7295591831207275,\n",
       "  0.2088092416524887,\n",
       "  0.019872846081852913,\n",
       "  1.2050191164016724,\n",
       "  -0.3474351167678833,\n",
       "  1.17479407787323],\n",
       " [0.20904719829559326,\n",
       "  0.45505788922309875,\n",
       "  0.28210440278053284,\n",
       "  -0.009509935975074768,\n",
       "  -1.037472128868103,\n",
       "  1.1277707815170288,\n",
       "  1.220852255821228,\n",
       "  0.15451034903526306],\n",
       " [0.5883464217185974,\n",
       "  0.29319003224372864,\n",
       "  -0.060440342873334885,\n",
       "  -0.055946122854948044,\n",
       "  0.09724418073892593,\n",
       "  0.9549344778060913,\n",
       "  0.43950051069259644,\n",
       "  1.161268711090088],\n",
       " [1.3197003602981567,\n",
       "  1.2076330184936523,\n",
       "  -0.003703589551150799,\n",
       "  0.8354045152664185,\n",
       "  -0.425660640001297,\n",
       "  1.4657313823699951,\n",
       "  0.47799360752105713,\n",
       "  1.3319497108459473],\n",
       " [0.5192162990570068,\n",
       "  0.8687041997909546,\n",
       "  0.008965592831373215,\n",
       "  0.4918502867221832,\n",
       "  0.36339232325553894,\n",
       "  0.2682635188102722,\n",
       "  -0.20460271835327148,\n",
       "  0.35946759581565857],\n",
       " [0.2912704348564148,\n",
       "  0.12361277639865875,\n",
       "  0.17179493606090546,\n",
       "  0.6149757504463196,\n",
       "  0.6315135359764099,\n",
       "  0.4211508333683014,\n",
       "  1.3767644166946411,\n",
       "  1.653198480606079],\n",
       " [-0.05475441366434097,\n",
       "  0.6019850969314575,\n",
       "  1.157862663269043,\n",
       "  0.5417839288711548,\n",
       "  1.4341111183166504,\n",
       "  0.5891523361206055,\n",
       "  0.8009392619132996,\n",
       "  0.058924514800310135],\n",
       " [1.4671003818511963,\n",
       "  0.7696497440338135,\n",
       "  0.04928953945636749,\n",
       "  -0.18722759187221527,\n",
       "  0.6454852819442749,\n",
       "  0.6064060926437378,\n",
       "  -0.3688652217388153,\n",
       "  1.315749168395996],\n",
       " [0.42179441452026367,\n",
       "  0.6917396187782288,\n",
       "  0.5397831797599792,\n",
       "  1.955932855606079,\n",
       "  0.29281508922576904,\n",
       "  0.9329748153686523,\n",
       "  -0.5908997058868408,\n",
       "  -0.5207396149635315],\n",
       " [0.750171422958374,\n",
       "  0.48494982719421387,\n",
       "  0.2705206573009491,\n",
       "  0.8532837629318237,\n",
       "  0.7773425579071045,\n",
       "  1.7942534685134888,\n",
       "  1.4432263374328613,\n",
       "  0.5014457702636719],\n",
       " [0.3951336145401001,\n",
       "  0.9864742159843445,\n",
       "  0.8481379747390747,\n",
       "  1.2072962522506714,\n",
       "  -0.27603867650032043,\n",
       "  -0.05330312252044678,\n",
       "  0.19482672214508057,\n",
       "  1.1962283849716187],\n",
       " [0.04070121422410011,\n",
       "  1.8823387622833252,\n",
       "  0.30890876054763794,\n",
       "  0.40606430172920227,\n",
       "  1.9467653036117554,\n",
       "  1.2084542512893677,\n",
       "  0.004305973183363676,\n",
       "  -0.0805482342839241],\n",
       " [0.4816119074821472,\n",
       "  0.8144100308418274,\n",
       "  0.8388530015945435,\n",
       "  0.26400429010391235,\n",
       "  1.061220407485962,\n",
       "  0.4531838297843933,\n",
       "  -0.4124085307121277,\n",
       "  0.6626724004745483],\n",
       " [1.070812463760376,\n",
       "  -0.1101609468460083,\n",
       "  0.6500921249389648,\n",
       "  0.513896644115448,\n",
       "  1.3457449674606323,\n",
       "  0.9877849817276001,\n",
       "  -0.6804384589195251,\n",
       "  0.3385601043701172],\n",
       " [0.6764967441558838,\n",
       "  0.10054601728916168,\n",
       "  0.09597928076982498,\n",
       "  0.7694987654685974,\n",
       "  0.2923740744590759,\n",
       "  1.0086615085601807,\n",
       "  0.07823382318019867,\n",
       "  0.04339151456952095],\n",
       " [0.8498806357383728,\n",
       "  1.1535964012145996,\n",
       "  0.9463326930999756,\n",
       "  0.47997772693634033,\n",
       "  1.004679799079895,\n",
       "  0.9928759336471558,\n",
       "  0.5152087211608887,\n",
       "  0.9316871166229248],\n",
       " [-0.05526696890592575,\n",
       "  1.2278984785079956,\n",
       "  0.4096958637237549,\n",
       "  -0.44681990146636963,\n",
       "  0.2940526604652405,\n",
       "  0.7505502700805664,\n",
       "  0.8732110857963562,\n",
       "  2.1323494911193848],\n",
       " [0.7354442477226257,\n",
       "  1.1341750621795654,\n",
       "  1.1794847249984741,\n",
       "  0.7649127244949341,\n",
       "  1.785014271736145,\n",
       "  0.6644093990325928,\n",
       "  0.9591809511184692,\n",
       "  0.606896698474884],\n",
       " [0.7801209092140198,\n",
       "  1.1253390312194824,\n",
       "  1.2356977462768555,\n",
       "  -0.22753582894802094,\n",
       "  0.4800240695476532,\n",
       "  1.3247133493423462,\n",
       "  0.48555701971054077,\n",
       "  -0.16997872292995453],\n",
       " [0.6628124117851257,\n",
       "  1.1852589845657349,\n",
       "  2.192808151245117,\n",
       "  0.23406760394573212,\n",
       "  0.1030799075961113,\n",
       "  0.582740068435669,\n",
       "  -0.6167315244674683,\n",
       "  -0.04028385877609253],\n",
       " [-0.010954015888273716,\n",
       "  1.038774847984314,\n",
       "  0.8577556610107422,\n",
       "  1.1882089376449585,\n",
       "  0.650393009185791,\n",
       "  -0.9517457485198975,\n",
       "  0.9435125589370728,\n",
       "  0.6280628442764282],\n",
       " [1.6762473583221436,\n",
       "  0.3327704966068268,\n",
       "  0.2545875608921051,\n",
       "  1.5098705291748047,\n",
       "  1.0780363082885742,\n",
       "  0.9733999967575073,\n",
       "  -0.48176679015159607,\n",
       "  1.6776777505874634],\n",
       " [1.5276129245758057,\n",
       "  0.24758389592170715,\n",
       "  0.45884865522384644,\n",
       "  1.42098069190979,\n",
       "  0.06954687833786011,\n",
       "  0.38857394456863403,\n",
       "  0.7807379961013794,\n",
       "  0.33969971537590027],\n",
       " [0.8601626753807068,\n",
       "  0.09213321655988693,\n",
       "  0.8481763601303101,\n",
       "  0.2543766498565674,\n",
       "  -0.10375819355249405,\n",
       "  0.6901337504386902,\n",
       "  -0.3980754315853119,\n",
       "  1.0302845239639282],\n",
       " [1.8237831592559814,\n",
       "  1.2362412214279175,\n",
       "  0.8986225128173828,\n",
       "  0.6020984053611755,\n",
       "  0.8156884908676147,\n",
       "  0.9590381383895874,\n",
       "  0.30237531661987305,\n",
       "  -0.2997187674045563],\n",
       " [0.9146020412445068,\n",
       "  0.508873701095581,\n",
       "  0.28016042709350586,\n",
       "  0.3422899544239044,\n",
       "  -0.40016594529151917,\n",
       "  0.35678502917289734,\n",
       "  1.8455196619033813,\n",
       "  0.8587247133255005],\n",
       " [-0.47478437423706055,\n",
       "  0.9118719696998596,\n",
       "  1.2835075855255127,\n",
       "  0.8211827874183655,\n",
       "  0.38739511370658875,\n",
       "  0.7812017202377319,\n",
       "  0.25025805830955505,\n",
       "  1.302846908569336],\n",
       " [1.4159207344055176,\n",
       "  0.16711345314979553,\n",
       "  1.434517741203308,\n",
       "  1.3214057683944702,\n",
       "  1.3226855993270874,\n",
       "  1.1861059665679932,\n",
       "  -0.20304208993911743,\n",
       "  1.5786973237991333],\n",
       " [0.3654381334781647,\n",
       "  0.5081825256347656,\n",
       "  0.014794752933084965,\n",
       "  1.148366093635559,\n",
       "  -0.4811488389968872,\n",
       "  -0.25468310713768005,\n",
       "  1.1656373739242554,\n",
       "  0.11195844411849976],\n",
       " [1.4085272550582886,\n",
       "  -0.4037753641605377,\n",
       "  0.9422607421875,\n",
       "  0.13988910615444183,\n",
       "  0.050116777420043945,\n",
       "  0.1134689599275589,\n",
       "  0.4285908639431,\n",
       "  0.5250247716903687],\n",
       " [0.44087299704551697,\n",
       "  0.36162152886390686,\n",
       "  1.8734948635101318,\n",
       "  0.5431849956512451,\n",
       "  0.7395663261413574,\n",
       "  1.2482692003250122,\n",
       "  1.1528228521347046,\n",
       "  2.428717851638794],\n",
       " [0.33401569724082947,\n",
       "  0.7288097739219666,\n",
       "  1.1391453742980957,\n",
       "  1.2940435409545898,\n",
       "  0.22970148921012878,\n",
       "  0.19682931900024414,\n",
       "  0.47764045000076294,\n",
       "  -0.5661598443984985],\n",
       " [0.295767217874527,\n",
       "  0.26648056507110596,\n",
       "  0.1995035856962204,\n",
       "  0.049438633024692535,\n",
       "  1.508899450302124,\n",
       "  1.3668992519378662,\n",
       "  -0.4998842477798462,\n",
       "  1.1007957458496094],\n",
       " [1.0798485279083252,\n",
       "  1.6273581981658936,\n",
       "  0.42707160115242004,\n",
       "  0.6443476676940918,\n",
       "  0.8067579865455627,\n",
       "  2.0519936084747314,\n",
       "  -0.2740725576877594,\n",
       "  0.5505490303039551],\n",
       " [0.7769162654876709,\n",
       "  -0.00224693538621068,\n",
       "  0.03957480192184448,\n",
       "  0.906862735748291,\n",
       "  0.3893915116786957,\n",
       "  0.8970329165458679,\n",
       "  1.2255879640579224,\n",
       "  0.08437921851873398],\n",
       " [-0.14571775496006012,\n",
       "  0.966600775718689,\n",
       "  0.4345952570438385,\n",
       "  0.8231303691864014,\n",
       "  0.5031105875968933,\n",
       "  0.21177180111408234,\n",
       "  1.299818515777588,\n",
       "  0.8648117780685425],\n",
       " [0.7784526348114014,\n",
       "  0.27712348103523254,\n",
       "  0.885769784450531,\n",
       "  1.1886438131332397,\n",
       "  2.076601982116699,\n",
       "  0.8939675688743591,\n",
       "  0.4607243835926056,\n",
       "  1.9945210218429565],\n",
       " [0.5082095861434937,\n",
       "  1.2103382349014282,\n",
       "  -0.6380113363265991,\n",
       "  0.7936013340950012,\n",
       "  2.037707805633545,\n",
       "  0.3043364882469177,\n",
       "  1.1879013776779175,\n",
       "  1.046068549156189],\n",
       " [1.0691053867340088,\n",
       "  0.5895365476608276,\n",
       "  1.2797671556472778,\n",
       "  -0.09345820546150208,\n",
       "  0.21474705636501312,\n",
       "  0.5214143991470337,\n",
       "  0.336204469203949,\n",
       "  0.8931235671043396],\n",
       " [0.680213212966919,\n",
       "  0.1768905073404312,\n",
       "  0.6707269549369812,\n",
       "  0.4638071358203888,\n",
       "  0.039748724550008774,\n",
       "  0.39233896136283875,\n",
       "  1.110952615737915,\n",
       "  2.279693603515625],\n",
       " [1.09459388256073,\n",
       "  -0.1857849806547165,\n",
       "  1.0100557804107666,\n",
       "  -0.1944914609193802,\n",
       "  0.10527966171503067,\n",
       "  -0.2062772661447525,\n",
       "  1.8221585750579834,\n",
       "  1.0229823589324951],\n",
       " [0.7802533507347107,\n",
       "  0.7085791826248169,\n",
       "  -0.27328455448150635,\n",
       "  1.2189863920211792,\n",
       "  1.36111319065094,\n",
       "  0.20935791730880737,\n",
       "  0.16338394582271576,\n",
       "  1.0709962844848633],\n",
       " [-0.40292611718177795,\n",
       "  0.7480397820472717,\n",
       "  -0.45666036009788513,\n",
       "  0.7400232553482056,\n",
       "  0.19062019884586334,\n",
       "  0.1543777734041214,\n",
       "  1.5362282991409302,\n",
       "  -0.6224579215049744],\n",
       " [0.3204180896282196,\n",
       "  0.5564504265785217,\n",
       "  0.755052387714386,\n",
       "  -0.5127205848693848,\n",
       "  0.5845710635185242,\n",
       "  0.429622620344162,\n",
       "  1.6940213441848755,\n",
       "  0.4731834828853607],\n",
       " [0.342244416475296,\n",
       "  1.0649559497833252,\n",
       "  1.361362099647522,\n",
       "  0.26266101002693176,\n",
       "  0.7062035799026489,\n",
       "  1.9544378519058228,\n",
       "  0.44494250416755676,\n",
       "  1.070021629333496],\n",
       " [1.1845399141311646,\n",
       "  1.1421592235565186,\n",
       "  1.2326806783676147,\n",
       "  2.094886064529419,\n",
       "  1.2099201679229736,\n",
       "  1.5248699188232422,\n",
       "  0.6916237473487854,\n",
       "  1.8732826709747314],\n",
       " [0.38201871514320374,\n",
       "  0.3010265827178955,\n",
       "  0.2219436764717102,\n",
       "  0.9328641891479492,\n",
       "  1.0962135791778564,\n",
       "  -0.3582609295845032,\n",
       "  -0.31855231523513794,\n",
       "  -0.16691841185092926],\n",
       " [1.5940206050872803,\n",
       "  1.3722281455993652,\n",
       "  0.9695937633514404,\n",
       "  0.05265501141548157,\n",
       "  0.7468364834785461,\n",
       "  0.9713484048843384,\n",
       "  -0.47122853994369507,\n",
       "  1.077422022819519],\n",
       " [0.6325852870941162,\n",
       "  1.7636756896972656,\n",
       "  0.18441727757453918,\n",
       "  -0.11720021814107895,\n",
       "  -0.0647180825471878,\n",
       "  1.2013732194900513,\n",
       "  1.0529659986495972,\n",
       "  0.9363645315170288],\n",
       " [1.649731159210205,\n",
       "  1.1633039712905884,\n",
       "  1.136684536933899,\n",
       "  -0.8585618734359741,\n",
       "  0.46549010276794434,\n",
       "  0.7601560354232788,\n",
       "  0.33394503593444824,\n",
       "  0.529916524887085],\n",
       " [0.6806090474128723,\n",
       "  0.5332726836204529,\n",
       "  0.5788615942001343,\n",
       "  1.0026912689208984,\n",
       "  0.17414388060569763,\n",
       "  1.128904104232788,\n",
       "  0.46309685707092285,\n",
       "  0.6091936230659485],\n",
       " [0.9383937120437622,\n",
       "  0.35065189003944397,\n",
       "  1.2133915424346924,\n",
       "  0.09652777761220932,\n",
       "  0.23130734264850616,\n",
       "  0.8313935995101929,\n",
       "  1.2097878456115723,\n",
       "  1.3450511693954468],\n",
       " [1.2856299877166748,\n",
       "  1.1726293563842773,\n",
       "  0.6220411062240601,\n",
       "  1.1201838254928589,\n",
       "  0.26893267035484314,\n",
       "  1.825939416885376,\n",
       "  1.1493149995803833,\n",
       "  0.5522738695144653],\n",
       " [1.420372486114502,\n",
       "  1.929799199104309,\n",
       "  0.32010161876678467,\n",
       "  0.6061977744102478,\n",
       "  0.9504846334457397,\n",
       "  -0.5548103451728821,\n",
       "  -0.555100679397583,\n",
       "  0.8679473400115967],\n",
       " [1.2216781377792358,\n",
       "  0.017833834514021873,\n",
       "  -0.19076445698738098,\n",
       "  0.3530937135219574,\n",
       "  1.502084732055664,\n",
       "  0.7612553238868713,\n",
       "  0.5569666028022766,\n",
       "  0.2533722221851349],\n",
       " [0.6908956170082092,\n",
       "  0.7026292681694031,\n",
       "  0.6863276362419128,\n",
       "  0.1765701323747635,\n",
       "  0.11752423644065857,\n",
       "  0.38480836153030396,\n",
       "  0.5867794156074524,\n",
       "  -0.08491615951061249],\n",
       " [0.325395405292511,\n",
       "  1.1613688468933105,\n",
       "  1.7923598289489746,\n",
       "  0.4058593809604645,\n",
       "  1.0975927114486694,\n",
       "  1.2030913829803467,\n",
       "  1.2477843761444092,\n",
       "  0.6254990696907043],\n",
       " [1.6502033472061157,\n",
       "  0.6255520582199097,\n",
       "  0.8132091164588928,\n",
       "  0.7103356719017029,\n",
       "  0.23574237525463104,\n",
       "  0.7412241697311401,\n",
       "  0.8604669570922852,\n",
       "  0.16620051860809326],\n",
       " [0.8607344627380371,\n",
       "  0.5126792192459106,\n",
       "  0.8949565887451172,\n",
       "  -0.8624919056892395,\n",
       "  0.6184847354888916,\n",
       "  0.12827281653881073,\n",
       "  0.5515650510787964,\n",
       "  0.7814645767211914],\n",
       " [1.2509520053863525,\n",
       "  1.8265074491500854,\n",
       "  0.913383424282074,\n",
       "  0.320530503988266,\n",
       "  0.04232092574238777,\n",
       "  0.6731711030006409,\n",
       "  1.3651885986328125,\n",
       "  0.062046606093645096],\n",
       " [0.08859376609325409,\n",
       "  1.5717859268188477,\n",
       "  0.5105108618736267,\n",
       "  0.9112561941146851,\n",
       "  0.013352496549487114,\n",
       "  -0.15414656698703766,\n",
       "  0.165570467710495,\n",
       "  1.1486862897872925],\n",
       " [0.002404895145446062,\n",
       "  -1.020896315574646,\n",
       "  1.1896462440490723,\n",
       "  1.174106240272522,\n",
       "  1.0812612771987915,\n",
       "  -0.43598777055740356,\n",
       "  0.359160840511322,\n",
       "  0.43582600355148315],\n",
       " [1.2849513292312622,\n",
       "  0.9163972735404968,\n",
       "  0.6152085065841675,\n",
       "  -0.3441660404205322,\n",
       "  1.0357708930969238,\n",
       "  0.3024398684501648,\n",
       "  0.8666316866874695,\n",
       "  1.192746639251709],\n",
       " [0.7207927107810974,\n",
       "  0.38642406463623047,\n",
       "  0.6963471174240112,\n",
       "  0.8221131563186646,\n",
       "  -0.5168943405151367,\n",
       "  -0.015445951372385025,\n",
       "  1.347639560699463,\n",
       "  1.7618341445922852],\n",
       " [2.1668777465820312,\n",
       "  -0.24251900613307953,\n",
       "  0.29317954182624817,\n",
       "  -0.25726935267448425,\n",
       "  0.673160195350647,\n",
       "  1.4945576190948486,\n",
       "  1.853124737739563,\n",
       "  1.1555931568145752],\n",
       " [1.703830361366272,\n",
       "  0.9154699444770813,\n",
       "  1.2690119743347168,\n",
       "  0.2828417122364044,\n",
       "  1.1332813501358032,\n",
       "  0.932121753692627,\n",
       "  1.6745953559875488,\n",
       "  0.11623423546552658],\n",
       " [0.7360759377479553,\n",
       "  0.6206526756286621,\n",
       "  -0.3077438175678253,\n",
       "  1.6071828603744507,\n",
       "  2.2032573223114014,\n",
       "  0.5375311374664307,\n",
       "  0.7111477851867676,\n",
       "  0.6018490195274353],\n",
       " [1.0654525756835938,\n",
       "  0.7688785791397095,\n",
       "  0.5586860775947571,\n",
       "  0.9650017023086548,\n",
       "  0.9072736501693726,\n",
       "  0.9018439054489136,\n",
       "  -0.22253873944282532,\n",
       "  0.36667880415916443],\n",
       " [1.2405452728271484,\n",
       "  1.0191833972930908,\n",
       "  -0.005214673932641745,\n",
       "  1.040305733680725,\n",
       "  0.5613710880279541,\n",
       "  0.5602118968963623,\n",
       "  0.7575920820236206,\n",
       "  -0.35897281765937805],\n",
       " [0.5569193363189697,\n",
       "  1.3551079034805298,\n",
       "  0.15984205901622772,\n",
       "  0.2479133903980255,\n",
       "  0.7340414524078369,\n",
       "  1.3456863164901733,\n",
       "  1.3751288652420044,\n",
       "  0.7270274758338928],\n",
       " [0.6787663102149963,\n",
       "  0.9594399929046631,\n",
       "  0.5937714576721191,\n",
       "  0.1389978677034378,\n",
       "  0.3654891848564148,\n",
       "  0.3847017288208008,\n",
       "  1.3676127195358276,\n",
       "  0.8723689913749695],\n",
       " [1.132835865020752,\n",
       "  -0.9039286971092224,\n",
       "  0.5807647705078125,\n",
       "  1.483508586883545,\n",
       "  1.7751233577728271,\n",
       "  -0.04039357602596283,\n",
       "  0.12359971553087234,\n",
       "  0.5862955451011658],\n",
       " [1.40657639503479,\n",
       "  0.588105320930481,\n",
       "  0.5496704578399658,\n",
       "  2.0465126037597656,\n",
       "  0.35773658752441406,\n",
       "  0.7202553749084473,\n",
       "  0.509002685546875,\n",
       "  0.7192836403846741],\n",
       " [0.3304469883441925,\n",
       "  0.6458250880241394,\n",
       "  0.9772236347198486,\n",
       "  1.4723252058029175,\n",
       "  -0.03490195795893669,\n",
       "  1.0204366445541382,\n",
       "  0.920631468296051,\n",
       "  0.6667734980583191],\n",
       " [0.00522497808560729,\n",
       "  1.0592433214187622,\n",
       "  0.2269735038280487,\n",
       "  0.1800757646560669,\n",
       "  1.177268385887146,\n",
       "  0.8310421705245972,\n",
       "  -0.0851120725274086,\n",
       "  0.5035482048988342],\n",
       " [0.2975998520851135,\n",
       "  0.8064444661140442,\n",
       "  -0.0661138966679573,\n",
       "  -0.8295859694480896,\n",
       "  0.7061238884925842,\n",
       "  0.8663055300712585,\n",
       "  0.5299340486526489,\n",
       "  0.4862222969532013],\n",
       " [0.7263649702072144,\n",
       "  0.09446768462657928,\n",
       "  0.8933952450752258,\n",
       "  -0.4497760534286499,\n",
       "  0.8058443069458008,\n",
       "  0.2425367385149002,\n",
       "  1.396353840827942,\n",
       "  0.563052237033844],\n",
       " [0.3839539885520935,\n",
       "  0.7282782196998596,\n",
       "  1.212318778038025,\n",
       "  0.4261894226074219,\n",
       "  0.07833203673362732,\n",
       "  1.0120736360549927,\n",
       "  -0.18252773582935333,\n",
       "  1.3580633401870728],\n",
       " [0.6570674180984497,\n",
       "  0.6796287298202515,\n",
       "  -0.49393969774246216,\n",
       "  -0.2153170108795166,\n",
       "  0.3603777289390564,\n",
       "  0.137309730052948,\n",
       "  1.3760960102081299,\n",
       "  0.9498162269592285],\n",
       " [1.0676186084747314,\n",
       "  2.209193229675293,\n",
       "  0.7915538549423218,\n",
       "  0.02912929654121399,\n",
       "  0.2635575234889984,\n",
       "  1.1975409984588623,\n",
       "  0.15380337834358215,\n",
       "  -0.27338406443595886],\n",
       " [0.6681488752365112,\n",
       "  0.424796998500824,\n",
       "  -0.021005665883421898,\n",
       "  0.9207539558410645,\n",
       "  0.8836610913276672,\n",
       "  0.45215198397636414,\n",
       "  0.9294887781143188,\n",
       "  0.409376323223114],\n",
       " [0.4538514316082001,\n",
       "  0.00935031846165657,\n",
       "  -0.2988351881504059,\n",
       "  -0.3043774962425232,\n",
       "  0.36880621314048767,\n",
       "  0.8153471946716309,\n",
       "  1.0106143951416016,\n",
       "  1.275555968284607],\n",
       " [0.726807713508606,\n",
       "  1.5195404291152954,\n",
       "  0.921931266784668,\n",
       "  2.021548271179199,\n",
       "  1.0035730600357056,\n",
       "  0.48184406757354736,\n",
       "  0.5714308619499207,\n",
       "  2.3149890899658203],\n",
       " [0.10742609202861786,\n",
       "  0.8301975131034851,\n",
       "  0.055366963148117065,\n",
       "  0.671789824962616,\n",
       "  0.6173340082168579,\n",
       "  0.3647671639919281,\n",
       "  1.0987956523895264,\n",
       "  -0.11046690493822098],\n",
       " [1.8518093824386597,\n",
       "  1.4940919876098633,\n",
       "  0.46032124757766724,\n",
       "  0.6791858077049255,\n",
       "  0.7957040071487427,\n",
       "  -0.1715833693742752,\n",
       "  1.2952808141708374,\n",
       "  -0.22201302647590637],\n",
       " [1.4590705633163452,\n",
       "  -0.024394001811742783,\n",
       "  0.8681504130363464,\n",
       "  1.3634161949157715,\n",
       "  1.2020343542099,\n",
       "  0.8399321436882019,\n",
       "  0.7074353098869324,\n",
       "  1.1966806650161743],\n",
       " [1.206592082977295,\n",
       "  1.5290743112564087,\n",
       "  1.4625062942504883,\n",
       "  1.3183199167251587,\n",
       "  0.8079599738121033,\n",
       "  1.2530597448349,\n",
       "  0.2964257597923279,\n",
       "  1.7012945413589478],\n",
       " [0.23744837939739227,\n",
       "  -0.40892449021339417,\n",
       "  0.45564913749694824,\n",
       "  0.9539667367935181,\n",
       "  0.45330315828323364,\n",
       "  0.6427394151687622,\n",
       "  2.1651062965393066,\n",
       "  0.45300400257110596],\n",
       " [1.1088565587997437,\n",
       "  1.0284892320632935,\n",
       "  0.8455905914306641,\n",
       "  0.44362688064575195,\n",
       "  0.20423926413059235,\n",
       "  1.4118196964263916,\n",
       "  0.6883041858673096,\n",
       "  1.060295820236206],\n",
       " [0.11269007623195648,\n",
       "  -0.4214974343776703,\n",
       "  1.3352781534194946,\n",
       "  1.374683141708374,\n",
       "  0.24926240742206573,\n",
       "  1.2196691036224365,\n",
       "  0.30621537566185,\n",
       "  1.5557289123535156],\n",
       " [0.5174117684364319,\n",
       "  1.3464765548706055,\n",
       "  0.6041826605796814,\n",
       "  0.8579862117767334,\n",
       "  1.1107572317123413,\n",
       "  0.06005809083580971,\n",
       "  0.1940934956073761,\n",
       "  0.7053118944168091],\n",
       " [-0.13123492896556854,\n",
       "  -0.016795404255390167,\n",
       "  0.06256788969039917,\n",
       "  0.6004949808120728,\n",
       "  1.5595862865447998,\n",
       "  0.2785467505455017,\n",
       "  0.16029894351959229,\n",
       "  0.1422906219959259],\n",
       " [1.4414560794830322,\n",
       "  1.0570769309997559,\n",
       "  0.868442177772522,\n",
       "  0.4539620280265808,\n",
       "  0.08614809066057205,\n",
       "  1.372676968574524,\n",
       "  1.2798688411712646,\n",
       "  1.1097421646118164],\n",
       " [0.2684161067008972,\n",
       "  0.439188152551651,\n",
       "  0.7649790644645691,\n",
       "  1.31251859664917,\n",
       "  1.30439293384552,\n",
       "  0.6975262761116028,\n",
       "  0.9592947363853455,\n",
       "  0.958408772945404],\n",
       " [0.3814082741737366,\n",
       "  0.453965961933136,\n",
       "  0.7196049690246582,\n",
       "  0.42421868443489075,\n",
       "  0.7008553743362427,\n",
       "  0.7288287878036499,\n",
       "  0.6543476581573486,\n",
       "  0.952244758605957],\n",
       " [0.3671402931213379,\n",
       "  0.5315374135971069,\n",
       "  0.4745793640613556,\n",
       "  0.7873765230178833,\n",
       "  0.709794282913208,\n",
       "  0.21623584628105164,\n",
       "  1.0196788311004639,\n",
       "  1.0949370861053467],\n",
       " [0.7354912757873535,\n",
       "  0.6874397397041321,\n",
       "  0.1763080209493637,\n",
       "  0.4025425910949707,\n",
       "  1.027233362197876,\n",
       "  0.39462319016456604,\n",
       "  0.07225216925144196,\n",
       "  0.1704787313938141],\n",
       " [0.6506420373916626,\n",
       "  -0.04041002318263054,\n",
       "  0.1438077688217163,\n",
       "  1.0109241008758545,\n",
       "  0.6514233350753784,\n",
       "  -0.8521170020103455,\n",
       "  1.3378108739852905,\n",
       "  0.1001066043972969],\n",
       " [0.9261475205421448,\n",
       "  0.8001077175140381,\n",
       "  0.09820841997861862,\n",
       "  -0.3343108594417572,\n",
       "  0.04634549841284752,\n",
       "  1.917747139930725,\n",
       "  1.0293104648590088,\n",
       "  -1.0091208219528198],\n",
       " [-0.1385919749736786,\n",
       "  0.7082226276397705,\n",
       "  0.6447237730026245,\n",
       "  0.7746997475624084,\n",
       "  1.6993699073791504,\n",
       "  1.3641350269317627,\n",
       "  0.12773475050926208,\n",
       "  0.5222804546356201],\n",
       " [1.4062235355377197,\n",
       "  0.03826115280389786,\n",
       "  -0.259659081697464,\n",
       "  1.4203596115112305,\n",
       "  0.379154235124588,\n",
       "  0.630120038986206,\n",
       "  -0.18724484741687775,\n",
       "  0.8353606462478638],\n",
       " [1.1043081283569336,\n",
       "  0.7791976928710938,\n",
       "  -0.19114869832992554,\n",
       "  0.11566777527332306,\n",
       "  1.0171705484390259,\n",
       "  0.3010592460632324,\n",
       "  1.053039312362671,\n",
       "  0.4721204936504364],\n",
       " [-0.2952454388141632,\n",
       "  0.6775184273719788,\n",
       "  0.48211121559143066,\n",
       "  0.978454053401947,\n",
       "  0.4149613082408905,\n",
       "  1.0622574090957642,\n",
       "  0.7691749930381775,\n",
       "  0.3581303060054779],\n",
       " [0.24229146540164948,\n",
       "  0.5553103685379028,\n",
       "  -0.8581454753875732,\n",
       "  1.2745834589004517,\n",
       "  -0.8763539791107178,\n",
       "  0.701775074005127,\n",
       "  0.10425778478384018,\n",
       "  0.2401946783065796],\n",
       " [0.8545948266983032,\n",
       "  -0.17451059818267822,\n",
       "  1.2598119974136353,\n",
       "  -0.9395562410354614,\n",
       "  1.396363615989685,\n",
       "  1.4848977327346802,\n",
       "  0.4524111747741699,\n",
       "  -0.35299187898635864],\n",
       " [0.15612494945526123,\n",
       "  -0.3523448705673218,\n",
       "  0.7272168397903442,\n",
       "  0.5041846036911011,\n",
       "  -0.15174543857574463,\n",
       "  0.015094814822077751,\n",
       "  1.5621932744979858,\n",
       "  0.8149137496948242],\n",
       " [1.0070276260375977,\n",
       "  -0.05539850518107414,\n",
       "  0.09244812279939651,\n",
       "  0.6981140971183777,\n",
       "  -0.7125719785690308,\n",
       "  0.9088519215583801,\n",
       "  0.6550078988075256,\n",
       "  -0.3604903817176819],\n",
       " [0.9765928983688354,\n",
       "  0.8855018615722656,\n",
       "  0.252682089805603,\n",
       "  0.20546267926692963,\n",
       "  0.5478521585464478,\n",
       "  1.3398078680038452,\n",
       "  0.8862090110778809,\n",
       "  1.2731441259384155],\n",
       " [0.3954082727432251,\n",
       "  1.588169813156128,\n",
       "  0.3782908618450165,\n",
       "  0.9468039274215698,\n",
       "  1.1624008417129517,\n",
       "  1.2969897985458374,\n",
       "  0.766116201877594,\n",
       "  0.20078040659427643],\n",
       " [0.9564096927642822,\n",
       "  0.5308309197425842,\n",
       "  0.11746668070554733,\n",
       "  0.10570693761110306,\n",
       "  0.4627830386161804,\n",
       "  1.0371869802474976,\n",
       "  0.20262941718101501,\n",
       "  0.3032601773738861],\n",
       " [0.41498973965644836,\n",
       "  0.028873374685645103,\n",
       "  0.18186670541763306,\n",
       "  1.2770318984985352,\n",
       "  -1.0538816452026367,\n",
       "  -0.24003906548023224,\n",
       "  -0.08495943993330002,\n",
       "  1.6389695405960083],\n",
       " [0.2652709186077118,\n",
       "  1.3261181116104126,\n",
       "  0.6337336897850037,\n",
       "  0.8555748462677002,\n",
       "  0.5652663111686707,\n",
       "  2.0087316036224365,\n",
       "  -0.2759217321872711,\n",
       "  -0.0914098247885704],\n",
       " [0.1025044396519661,\n",
       "  2.2304728031158447,\n",
       "  0.5630025863647461,\n",
       "  1.5580294132232666,\n",
       "  1.1487301588058472,\n",
       "  0.4696703255176544,\n",
       "  0.03375399857759476,\n",
       "  0.31965160369873047],\n",
       " [1.1937085390090942,\n",
       "  0.5213587880134583,\n",
       "  1.4633442163467407,\n",
       "  0.8740283846855164,\n",
       "  0.32340532541275024,\n",
       "  0.9041404724121094,\n",
       "  1.054687738418579,\n",
       "  1.65773606300354],\n",
       " [0.7057143449783325,\n",
       "  0.7046194076538086,\n",
       "  1.4697520732879639,\n",
       "  0.958602786064148,\n",
       "  -0.9306812882423401,\n",
       "  0.8857044577598572,\n",
       "  1.0786972045898438,\n",
       "  1.0379245281219482],\n",
       " [0.3645034730434418,\n",
       "  1.3554524183273315,\n",
       "  0.07599811255931854,\n",
       "  1.7383042573928833,\n",
       "  0.24458803236484528,\n",
       "  0.24323605000972748,\n",
       "  1.348103642463684,\n",
       "  1.2751131057739258],\n",
       " [1.204211711883545,\n",
       "  0.7197462320327759,\n",
       "  1.5908452272415161,\n",
       "  1.4398975372314453,\n",
       "  0.09651727974414825,\n",
       "  0.214004248380661,\n",
       "  0.6956046223640442,\n",
       "  0.883601188659668],\n",
       " [0.026900697499513626,\n",
       "  -0.11655552685260773,\n",
       "  0.42784756422042847,\n",
       "  -0.11225815117359161,\n",
       "  0.7499053478240967,\n",
       "  1.6210694313049316,\n",
       "  0.6962728500366211,\n",
       "  0.35159698128700256],\n",
       " [0.5431519746780396,\n",
       "  0.8466474413871765,\n",
       "  1.3336379528045654,\n",
       "  -0.21684546768665314,\n",
       "  0.5499700307846069,\n",
       "  0.13268263638019562,\n",
       "  0.12403228878974915,\n",
       "  0.8910006880760193],\n",
       " [0.7507150173187256,\n",
       "  1.5242588520050049,\n",
       "  1.3608232736587524,\n",
       "  0.5221432447433472,\n",
       "  -0.6173688173294067,\n",
       "  0.8822352886199951,\n",
       "  0.04046197608113289,\n",
       "  -0.35247501730918884],\n",
       " [0.4271627366542816,\n",
       "  0.8299753665924072,\n",
       "  0.4768046736717224,\n",
       "  -0.7315855026245117,\n",
       "  0.45876410603523254,\n",
       "  0.2796204090118408,\n",
       "  0.07700302451848984,\n",
       "  1.0724687576293945]]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_global_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "536c53e8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[1.2789414210506524,\n",
       " 1.1444351660195289,\n",
       " 0.8576895991058547,\n",
       " 0.9957846266913529,\n",
       " 1.1940623428541408,\n",
       " 1.0515854056410228,\n",
       " 0.9158763950610106,\n",
       " 1.041106713747888,\n",
       " 0.8094246498029879,\n",
       " 1.077270659413497,\n",
       " 1.076344731246516,\n",
       " 0.7892422253842728,\n",
       " 1.1280148443047857,\n",
       " 0.6775278841694938,\n",
       " 0.7447525572334116,\n",
       " 0.914468572791752,\n",
       " 0.7643533884975461,\n",
       " 0.8704223425926917,\n",
       " 0.9459259392632074,\n",
       " 1.0660546684803391,\n",
       " 0.8605685407104281,\n",
       " 0.885949229192707,\n",
       " 1.049452661382768,\n",
       " 0.9209729214600941,\n",
       " 0.8148851552041058,\n",
       " 1.1792700629629054,\n",
       " 0.778919993230087,\n",
       " 1.4497533176476076,\n",
       " 0.9808222186454226,\n",
       " 1.0706228041210135,\n",
       " 1.0902701643237604,\n",
       " 0.8993412355990054,\n",
       " 0.9618567859269602,\n",
       " 1.016686919203627,\n",
       " 1.0493712189130104,\n",
       " 1.208850797059952,\n",
       " 1.1282282507989185,\n",
       " 0.7458944754669243,\n",
       " 1.318647920857263,\n",
       " 1.11380432064123,\n",
       " 0.805378528115097,\n",
       " 0.9872080735358896,\n",
       " 1.1021752771341824,\n",
       " 1.0853954444694542,\n",
       " 1.1937132611504246,\n",
       " 1.237953694307133,\n",
       " 0.7308603683823252,\n",
       " 0.9978174070821546,\n",
       " 0.840081881570296,\n",
       " 0.9509966427483894,\n",
       " 1.121900993495651,\n",
       " 0.6898015883219595,\n",
       " 0.8411843318799597,\n",
       " 0.7920940682757776,\n",
       " 1.1260139861552154,\n",
       " 1.0909879397099242,\n",
       " 1.122846237519303,\n",
       " 0.6528747678577818,\n",
       " 1.0732948074922257,\n",
       " 0.5948346773495066,\n",
       " 0.7627993195503463,\n",
       " 1.202169456548781,\n",
       " 1.000030305848548,\n",
       " 1.2233434357896642,\n",
       " 0.7883680032365468,\n",
       " 0.8991554424556865,\n",
       " 1.0498277784676324,\n",
       " 0.9655879590607488,\n",
       " 1.1310002079500192,\n",
       " 0.8231447408561201,\n",
       " 1.3175819371863164,\n",
       " 1.19207907462663,\n",
       " 1.0230348692468323,\n",
       " 0.9516580467151343,\n",
       " 1.0343795565068605,\n",
       " 1.2958321629134768,\n",
       " 1.22973217149893,\n",
       " 1.0101683816248248,\n",
       " 0.771083568919205,\n",
       " 0.8177430397307591,\n",
       " 0.6004059675226182,\n",
       " 0.7502062603480097,\n",
       " 1.1597587815021897,\n",
       " 0.67391946045594,\n",
       " 1.0904954233885598,\n",
       " 0.8942784155406722,\n",
       " 0.899993024838557,\n",
       " 0.8595597584154496,\n",
       " 1.1344230628140748,\n",
       " 0.9236291990583323,\n",
       " 1.0734187237442279,\n",
       " 0.839673313350288,\n",
       " 0.9032077241057525,\n",
       " 0.7524031176712599,\n",
       " 1.156161166444619,\n",
       " 0.9921729334269938,\n",
       " 1.2661944143130506,\n",
       " 0.9944545654754146,\n",
       " 0.8428173358693422,\n",
       " 0.9858365665471526,\n",
       " 1.1550364871331906,\n",
       " 1.004638200745789,\n",
       " 0.76481666544631,\n",
       " 1.199862112551204,\n",
       " 0.9390123638360952,\n",
       " 0.9574517563159382,\n",
       " 1.2705728039894775,\n",
       " 0.7840986656729837,\n",
       " 0.7626064058854152,\n",
       " 1.3137133307455413,\n",
       " 0.8340472143164365,\n",
       " 0.9311650295053677,\n",
       " 1.183079561772522,\n",
       " 0.9402723483590193,\n",
       " 0.933778800618082,\n",
       " 1.4001454662529456,\n",
       " 1.1026053508687774,\n",
       " 1.1238843436836423,\n",
       " 0.9790759283865151,\n",
       " 0.8879207247307959,\n",
       " 0.9448859190571819,\n",
       " 0.5848119171533821,\n",
       " 0.8037870740808079,\n",
       " 1.228855393584556,\n",
       " 1.5870973632160275,\n",
       " 0.5631007711443137,\n",
       " 0.9867132299752102,\n",
       " 1.0291078351725373,\n",
       " 1.0273270862529003,\n",
       " 1.0459751205929508,\n",
       " 1.1676436429310963,\n",
       " 1.2340209961350956,\n",
       " 0.9151137188414783,\n",
       " 0.8763611379234644,\n",
       " 0.7236397750265928,\n",
       " 1.3197058543123044,\n",
       " 1.1732292987683741,\n",
       " 0.7906999224094271,\n",
       " 1.069012867151624,\n",
       " 0.8409908746475121,\n",
       " 0.8521135803535274,\n",
       " 0.9554468654438175,\n",
       " 1.1305227614741191,\n",
       " 1.2052599629478513,\n",
       " 1.3513950803024586,\n",
       " 1.2856081805427775,\n",
       " 1.0283194524364885,\n",
       " 0.9722923066129932,\n",
       " 1.19710315225399,\n",
       " 1.02005624691803,\n",
       " 0.9431069666280019,\n",
       " 1.1658421883590382,\n",
       " 1.0652443383874433,\n",
       " 0.7565169055961083,\n",
       " 0.7010806492484793,\n",
       " 0.8393338748698556,\n",
       " 0.9415658071638927,\n",
       " 0.7516547106573682,\n",
       " 1.0751205910296704,\n",
       " 0.8239083458899201,\n",
       " 0.7661116964665197,\n",
       " 1.424540090914328,\n",
       " 0.8250989881547743,\n",
       " 1.1890674799872425,\n",
       " 1.1901293277602072,\n",
       " 1.4300039219073801,\n",
       " 0.9664609988096625,\n",
       " 1.1676811417977444,\n",
       " 1.0742066760999387,\n",
       " 1.0567402629321248,\n",
       " 0.6091478329616423,\n",
       " 1.2636786586109807,\n",
       " 1.0885532236309774,\n",
       " 0.8882422592246964,\n",
       " 1.0784806322107658,\n",
       " 0.8493340314745355,\n",
       " 0.7796384178698106,\n",
       " 0.8895941286031381,\n",
       " 1.1063750582532885,\n",
       " 0.8702675848323798,\n",
       " 0.9723567845363212,\n",
       " 0.8972549856861003,\n",
       " 0.5716861053344369,\n",
       " 0.8470046986002558,\n",
       " 0.8408002943787669,\n",
       " 0.6036623615966665,\n",
       " 1.0707731257820696,\n",
       " 1.2502567006268857,\n",
       " 0.8842383417419162,\n",
       " 0.7452710513205776,\n",
       " 0.9594335958740883,\n",
       " 1.1712386782685018,\n",
       " 1.2336139380633735,\n",
       " 1.050928258047929,\n",
       " 1.0838444206983695,\n",
       " 1.2681369067831545,\n",
       " 0.9496375791297514,\n",
       " 0.907608328771044,\n",
       " 0.9503862704597822,\n",
       " 0.7008634415507119]"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "all_bsne_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "e9aba55f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.3391261503361906,\n",
       " 0.4825641392837762,\n",
       " 0.3404793008880216,\n",
       " 0.2596116162601227,\n",
       " 0.33965131266593795,\n",
       " 0.38104282411026924,\n",
       " 0.3234525720974003,\n",
       " 0.28502643089583446,\n",
       " 0.3640787178836963,\n",
       " 0.4135935220684215,\n",
       " 0.3678376255383937,\n",
       " 0.3629521305318253,\n",
       " 0.43393352480875,\n",
       " 0.2796305596446741,\n",
       " 0.3973107638139909,\n",
       " 0.3371190357395379,\n",
       " 0.40820093317510153,\n",
       " 0.3725511833660581,\n",
       " 0.3922140164239236,\n",
       " 0.3327938990833189,\n",
       " 0.3934554020682659,\n",
       " 0.36606304750290813,\n",
       " 0.3516584996458027,\n",
       " 0.5499844444386384,\n",
       " 0.33491104179935505,\n",
       " 0.36500497571601054,\n",
       " 0.5267966671669309,\n",
       " 0.28002518009486954,\n",
       " 0.2401699098460963,\n",
       " 0.3223103010215719,\n",
       " 0.3830649209150805,\n",
       " 0.31730694576212193,\n",
       " 0.24050744972004728,\n",
       " 0.33671014854930614,\n",
       " 0.35246446044068713,\n",
       " 0.3433346243379699,\n",
       " 0.3240796010020749,\n",
       " 0.3320280435489292,\n",
       " 0.451896443269295,\n",
       " 0.28363679239935735,\n",
       " 0.3025990629463825,\n",
       " 0.35483483974444885,\n",
       " 0.2756391402220003,\n",
       " 0.35075039445042844,\n",
       " 0.34833491953149937,\n",
       " 0.21133126109175338,\n",
       " 0.3851313182722308,\n",
       " 0.30165000697536687,\n",
       " 0.34218717443443986,\n",
       " 0.3397929536202288,\n",
       " 0.35125739230162684,\n",
       " 0.33139926720682333,\n",
       " 0.31028941677232197,\n",
       " 0.30564734772780344,\n",
       " 0.25398962138575665,\n",
       " 0.43347547980596884,\n",
       " 0.4991391339763195,\n",
       " 0.37717912167607903,\n",
       " 0.39098672036680515,\n",
       " 0.2801705444564211,\n",
       " 0.22956063851709269,\n",
       " 0.37014283894726724,\n",
       " 0.2938541124175281,\n",
       " 0.3015261368110972,\n",
       " 0.30832574123372103,\n",
       " 0.27880639102486504,\n",
       " 0.3598541372806797,\n",
       " 0.30221039466031435,\n",
       " 0.2812094038619034,\n",
       " 0.29743154855711185,\n",
       " 0.3612334565295734,\n",
       " 0.43120972061700175,\n",
       " 0.4649088499329006,\n",
       " 0.2636718125121534,\n",
       " 0.3926630389988307,\n",
       " 0.3551040424076671,\n",
       " 0.28379100640932275,\n",
       " 0.43158404960573415,\n",
       " 0.26426748349120555,\n",
       " 0.35959925203938936,\n",
       " 0.3001108660439223,\n",
       " 0.32294402677779166,\n",
       " 0.3836277358468423,\n",
       " 0.33951232321472546,\n",
       " 0.4299602778375935,\n",
       " 0.2530278522208997,\n",
       " 0.3627946004992627,\n",
       " 0.3941346768643744,\n",
       " 0.2750238372512638,\n",
       " 0.36128477835706707,\n",
       " 0.3587949321711242,\n",
       " 0.3192298962183874,\n",
       " 0.3886712390298018,\n",
       " 0.3692554016147045,\n",
       " 0.29688128800709984,\n",
       " 0.3439640610118684,\n",
       " 0.28750465434333955,\n",
       " 0.36521236879629937,\n",
       " 0.304848367828101,\n",
       " 0.44283555471826064,\n",
       " 0.277433585823462,\n",
       " 0.35044011299392375,\n",
       " 0.35563747154727454,\n",
       " 0.40759605288433165,\n",
       " 0.35066366596512544,\n",
       " 0.299516785162202,\n",
       " 0.2426472418759541,\n",
       " 0.46178049347426336,\n",
       " 0.36209351685781965,\n",
       " 0.21514450383170777,\n",
       " 0.3547939892879216,\n",
       " 0.39517750425833026,\n",
       " 0.3188478039528695,\n",
       " 0.4005850385374173,\n",
       " 0.3140136344248351,\n",
       " 0.3306698825208173,\n",
       " 0.2963363459277908,\n",
       " 0.5225792966335239,\n",
       " 0.25227946385762673,\n",
       " 0.3293561558610262,\n",
       " 0.28996270541136604,\n",
       " 0.3489063034031628,\n",
       " 0.2662122203327248,\n",
       " 0.3280017634292311,\n",
       " 0.217852065009221,\n",
       " 0.3018088853958081,\n",
       " 0.19760374288867544,\n",
       " 0.3304248722557459,\n",
       " 0.37974389962646593,\n",
       " 0.3996284882017364,\n",
       " 0.39058053301911816,\n",
       " 0.2344028421159783,\n",
       " 0.29198971543113905,\n",
       " 0.31692112420374186,\n",
       " 0.31606246154383144,\n",
       " 0.33733697901738036,\n",
       " 0.4478625219592778,\n",
       " 0.35499172930098766,\n",
       " 0.2622502901170909,\n",
       " 0.3090396951524307,\n",
       " 0.5039234035289539,\n",
       " 0.2216993458911099,\n",
       " 0.4801714536535904,\n",
       " 0.31317188312280775,\n",
       " 0.3479717443404607,\n",
       " 0.4468640654583677,\n",
       " 0.3644098830538208,\n",
       " 0.37041455921838295,\n",
       " 0.3843948023555327,\n",
       " 0.34991271528262646,\n",
       " 0.3508812706677782,\n",
       " 0.3036993051178616,\n",
       " 0.3153992671573596,\n",
       " 0.26923387349965155,\n",
       " 0.3514643722168788,\n",
       " 0.3053040036444985,\n",
       " 0.31448059640842524,\n",
       " 0.3202748187253576,\n",
       " 0.39524398784353987,\n",
       " 0.24048680334786487,\n",
       " 0.34982259185211284,\n",
       " 0.2293319959002328,\n",
       " 0.37069769904190847,\n",
       " 0.41621787363717827,\n",
       " 0.23858862247796608,\n",
       " 0.23309984378142654,\n",
       " 0.3474244193050533,\n",
       " 0.3187783630977503,\n",
       " 0.35770295909468586,\n",
       " 0.3822055244026512,\n",
       " 0.2771784404757257,\n",
       " 0.3050069984484722,\n",
       " 0.25046264367420745,\n",
       " 0.26130795066599377,\n",
       " 0.4283206771860465,\n",
       " 0.39103789640659564,\n",
       " 0.4043649559505328,\n",
       " 0.4552897436227873,\n",
       " 0.3935532691674234,\n",
       " 0.3374456734452343,\n",
       " 0.3909299722004724,\n",
       " 0.34133470144466227,\n",
       " 0.3986969334047794,\n",
       " 0.3493771199245089,\n",
       " 0.43134555402770086,\n",
       " 0.32453876148998,\n",
       " 0.2748665392095416,\n",
       " 0.40838668513981075,\n",
       " 0.41970411121165413,\n",
       " 0.47491466279081496,\n",
       " 0.2985131053633265,\n",
       " 0.36801176214904574,\n",
       " 0.23456274715064987,\n",
       " 0.31213656877123397,\n",
       " 0.2531820342064749,\n",
       " 0.41258340084092904,\n",
       " 0.4940401798614522,\n",
       " 0.38207363561933155,\n",
       " 0.4365370432362368,\n",
       " 0.33958674511460474]"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epoch_avg_local"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "ab85afd6",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0.9398152707144618,\n",
       " 0.6618710267357528,\n",
       " 0.517210298217833,\n",
       " 0.7361730104312301,\n",
       " 0.8544110301882029,\n",
       " 0.6705425815307535,\n",
       " 0.5924238229636103,\n",
       " 0.7560802828520536,\n",
       " 0.4453459319192916,\n",
       " 0.6636771373450756,\n",
       " 0.7085071057081223,\n",
       " 0.4262900948524475,\n",
       " 0.6940813194960356,\n",
       " 0.39789732452481985,\n",
       " 0.3474417934194207,\n",
       " 0.5773495370522141,\n",
       " 0.35615245532244444,\n",
       " 0.4978711592266336,\n",
       " 0.5537119228392839,\n",
       " 0.7332607693970203,\n",
       " 0.4671131386421621,\n",
       " 0.5198861816897988,\n",
       " 0.6977941617369652,\n",
       " 0.37098847702145576,\n",
       " 0.4799741134047508,\n",
       " 0.8142650872468948,\n",
       " 0.25212332606315613,\n",
       " 1.1697281375527382,\n",
       " 0.7406523087993264,\n",
       " 0.7483125030994415,\n",
       " 0.70720524340868,\n",
       " 0.5820342898368835,\n",
       " 0.721349336206913,\n",
       " 0.6799767706543207,\n",
       " 0.6969067584723234,\n",
       " 0.865516172721982,\n",
       " 0.8041486497968435,\n",
       " 0.4138664319179952,\n",
       " 0.8667514775879681,\n",
       " 0.8301675282418728,\n",
       " 0.5027794651687145,\n",
       " 0.6323732337914407,\n",
       " 0.826536136912182,\n",
       " 0.7346450500190258,\n",
       " 0.8453783416189253,\n",
       " 1.0266224332153797,\n",
       " 0.34572905011009425,\n",
       " 0.6961674001067877,\n",
       " 0.49789470713585615,\n",
       " 0.6112036891281605,\n",
       " 0.7706436011940241,\n",
       " 0.35840232111513615,\n",
       " 0.5308949151076376,\n",
       " 0.4864467205479741,\n",
       " 0.8720243647694588,\n",
       " 0.6575124599039555,\n",
       " 0.6237071035429835,\n",
       " 0.2756956461817026,\n",
       " 0.6823080871254206,\n",
       " 0.3146641328930855,\n",
       " 0.5332386810332537,\n",
       " 0.8320266176015139,\n",
       " 0.7061761934310198,\n",
       " 0.9218172989785671,\n",
       " 0.48004226200282574,\n",
       " 0.6203490514308214,\n",
       " 0.6899736411869526,\n",
       " 0.6633775644004345,\n",
       " 0.8497908040881157,\n",
       " 0.5257131922990084,\n",
       " 0.956348480656743,\n",
       " 0.7608693540096283,\n",
       " 0.5581260193139315,\n",
       " 0.687986234202981,\n",
       " 0.6417165175080299,\n",
       " 0.9407281205058098,\n",
       " 0.9459411650896072,\n",
       " 0.5785843320190907,\n",
       " 0.5068160854279995,\n",
       " 0.45814378769136965,\n",
       " 0.30029510147869587,\n",
       " 0.4272622335702181,\n",
       " 0.7761310456553474,\n",
       " 0.3344071372412145,\n",
       " 0.6605351455509663,\n",
       " 0.6412505633197725,\n",
       " 0.5371984243392944,\n",
       " 0.465425081551075,\n",
       " 0.8593992255628109,\n",
       " 0.5623444207012653,\n",
       " 0.7146237915731035,\n",
       " 0.5204434171319008,\n",
       " 0.5145364850759506,\n",
       " 0.3831477160565555,\n",
       " 0.8592798784375191,\n",
       " 0.6482088724151254,\n",
       " 0.9786897599697113,\n",
       " 0.6292421966791153,\n",
       " 0.5379689680412412,\n",
       " 0.5430010118288919,\n",
       " 0.8776029013097286,\n",
       " 0.6541980877518654,\n",
       " 0.40917919389903545,\n",
       " 0.792266059666872,\n",
       " 0.5883486978709698,\n",
       " 0.6579349711537361,\n",
       " 1.0279255621135235,\n",
       " 0.3223181721987203,\n",
       " 0.4005128890275955,\n",
       " 1.0985688269138336,\n",
       " 0.47925322502851486,\n",
       " 0.5359875252470374,\n",
       " 0.8642317578196526,\n",
       " 0.539687309821602,\n",
       " 0.6197651661932468,\n",
       " 1.0694755837321281,\n",
       " 0.8062690049409866,\n",
       " 0.6013050470501184,\n",
       " 0.7267964645288885,\n",
       " 0.5585645688697696,\n",
       " 0.6549232136458158,\n",
       " 0.23590561375021935,\n",
       " 0.5375748537480831,\n",
       " 0.9008536301553249,\n",
       " 1.3692452982068062,\n",
       " 0.2612918857485056,\n",
       " 0.7891094870865345,\n",
       " 0.6986829629167914,\n",
       " 0.6475831866264343,\n",
       " 0.6463466323912144,\n",
       " 0.7770631099119782,\n",
       " 0.9996181540191174,\n",
       " 0.6231240034103394,\n",
       " 0.5594400137197226,\n",
       " 0.4075773134827614,\n",
       " 0.9823688752949238,\n",
       " 0.7253667768090963,\n",
       " 0.43570819310843945,\n",
       " 0.806762577034533,\n",
       " 0.5319511794950813,\n",
       " 0.3481901768245734,\n",
       " 0.7337475195527077,\n",
       " 0.6503513078205287,\n",
       " 0.8920880798250437,\n",
       " 1.0034233359619975,\n",
       " 0.8387441150844097,\n",
       " 0.6639095693826675,\n",
       " 0.6018777473946102,\n",
       " 0.8127083498984575,\n",
       " 0.6701435316354036,\n",
       " 0.5922256959602237,\n",
       " 0.8621428832411766,\n",
       " 0.7498450712300837,\n",
       " 0.48728303209645674,\n",
       " 0.3496162770316005,\n",
       " 0.5340298712253571,\n",
       " 0.6270852107554674,\n",
       " 0.43137989193201065,\n",
       " 0.6798766031861305,\n",
       " 0.5834215425420552,\n",
       " 0.4162891046144068,\n",
       " 1.1952080950140953,\n",
       " 0.4544012891128659,\n",
       " 0.7728496063500643,\n",
       " 0.9515407052822411,\n",
       " 1.1969040781259537,\n",
       " 0.6190365795046091,\n",
       " 0.8489027786999941,\n",
       " 0.7165037170052528,\n",
       " 0.6745347385294735,\n",
       " 0.3319693924859166,\n",
       " 0.9586716601625085,\n",
       " 0.8380905799567699,\n",
       " 0.6269343085587025,\n",
       " 0.6501599550247192,\n",
       " 0.45829613506793976,\n",
       " 0.3752734619192779,\n",
       " 0.43430438498035073,\n",
       " 0.712821789085865,\n",
       " 0.5328219113871455,\n",
       " 0.5814268123358488,\n",
       " 0.5559202842414379,\n",
       " 0.17298917192965746,\n",
       " 0.4976275786757469,\n",
       " 0.409454740351066,\n",
       " 0.27912360010668635,\n",
       " 0.7959065865725279,\n",
       " 0.8418700154870749,\n",
       " 0.464534230530262,\n",
       " 0.2703563885297626,\n",
       " 0.6609204905107617,\n",
       " 0.8032269161194563,\n",
       " 0.9990511909127235,\n",
       " 0.7387916892766953,\n",
       " 0.8306623864918947,\n",
       " 0.8555535059422255,\n",
       " 0.45559739926829934,\n",
       " 0.5255346931517124,\n",
       " 0.5138492272235453,\n",
       " 0.36127669643610716]"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "epoch_avg_global"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1e39558c",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "daeba5bd",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1cf72980",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7b5196b4",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tsne01",
   "language": "python",
   "name": "tsne01"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
