{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "bc941af1",
   "metadata": {},
   "outputs": [],
   "source": [
    "import pickle\n",
    "import os\n",
    "import random as rd\n",
    "import numpy as np\n",
    "import copy\n",
    "import copy as cp\n",
    "import time\n",
    "import logging, sys, json\n",
    "from datetime import datetime\n",
    "import scipy.sparse as sp\n",
    "from scipy.io import loadmat\n",
    "import torch\n",
    "import torch.nn.functional as F\n",
    "import torch.nn as nn\n",
    "from torch.nn import TransformerEncoder, TransformerEncoderLayer\n",
    "from torch_geometric.utils import to_dense_adj, subgraph\n",
    "from torch.utils.tensorboard import SummaryWriter\n",
    "from sklearn.preprocessing import MinMaxScaler\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.metrics import f1_score, accuracy_score, recall_score, roc_auc_score, average_precision_score, confusion_matrix\n",
    "\n",
    "# Adjacency-list pickle filenames, keyed by '<dataset>_<relation>' tag.\n",
    "filelist = {\n",
    "    'amz_upu': 'amz_upu_adjlists.pickle',\n",
    "    'amz_usu': 'amz_usu_adjlists.pickle',\n",
    "    'amz_uvu': 'amz_uvu_adjlists.pickle',\n",
    "    'yelp_rsr': 'yelp_rsr_adjlists.pickle',\n",
    "    'yelp_rtr': 'yelp_rtr_adjlists.pickle',\n",
    "    'yelp_rur': 'yelp_rur_adjlists.pickle'\n",
    "}\n",
    "\n",
    "# Filename prefixes of the per-level relation-matrix pickles;\n",
    "# load_data appends '<level>.pkl' to each prefix when loading the trees.\n",
    "file_matrix_prefix = {\n",
    "    'amz_upu': 'amazon_upu_matrix_',\n",
    "    'amz_usu': 'amazon_usu_matrix_',\n",
    "    'amz_uvu': 'amazon_uvu_matrix_',\n",
    "    'yelp_rsr': 'yelpnet_rsr_matrix_decompision_',\n",
    "    'yelp_rtr': 'yelpnet_rtr_matrix_decompision_',\n",
    "    'yelp_rur': 'yelpnet_rur_matrix_decompision_'\n",
    "}\n",
    "\n",
    "\n",
    "def create_node_subgraph(node_idx, feat_data, edge_indexs, device):\n",
    "    \"\"\"\n",
    "    为单个节点创建一阶邻居子图（所有邻居）\n",
    "    \"\"\"\n",
    "    neighbors = set()\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        # 找出以中心点为起点的边的终点\n",
    "        rel_neighbors = edge_index[1][edge_index[0] == node_idx].tolist()\n",
    "        neighbors.update(rel_neighbors)\n",
    "\n",
    "    # 移除中心节点自身\n",
    "    neighbors.discard(node_idx)\n",
    "    neighbors = list(neighbors)\n",
    "\n",
    "    # 如果邻居太多，进行随机采样截取\n",
    "    sample_size = 399\n",
    "    if len(neighbors) > sample_size:\n",
    "        neighbors = np.random.choice(neighbors, size=sample_size, replace=False).tolist()\n",
    "\n",
    "    # 构建子图节点列表，确保中心节点是第一个\n",
    "    sub_nodes = [node_idx] + [n for n in neighbors if n != node_idx]\n",
    "\n",
    "    # 构建子图边列表\n",
    "    sub_edge_index = []\n",
    "    for rel_idx in range(len(edge_indexs)):\n",
    "        edge_index = edge_indexs[rel_idx][0].cpu().numpy()\n",
    "        mask = np.isin(edge_index[0], sub_nodes) & np.isin(edge_index[1], sub_nodes)\n",
    "        local_edges = edge_index[:, mask]\n",
    "\n",
    "        # 创建节点映射\n",
    "        node_map = {n: i for i, n in enumerate(sub_nodes)}\n",
    "\n",
    "        # 将全局索引映射到局部索引\n",
    "        if len(local_edges) > 0 and local_edges.size > 0:\n",
    "            src_nodes = [node_map[src] for src in local_edges[0]]\n",
    "            dst_nodes = [node_map[dst] for dst in local_edges[1]]\n",
    "            edge_tensor = torch.tensor([src_nodes, dst_nodes], dtype=torch.long)\n",
    "        else:\n",
    "            # 添加自环确保图不为空\n",
    "            edge_tensor = torch.tensor([[0], [0]], dtype=torch.long)\n",
    "\n",
    "        sub_edge_index.append(edge_tensor.to(device))\n",
    "\n",
    "    # 创建子图数据\n",
    "    subgraph = {\n",
    "        'features': feat_data[sub_nodes].clone(),\n",
    "        'edges': sub_edge_index,\n",
    "        'global_idx': sub_nodes\n",
    "    }\n",
    "\n",
    "    return subgraph\n",
    "\n",
    "\n",
    "def dict_to_edge_index(edge_dict):\n",
    "    source_nodes = []\n",
    "    target_nodes = []\n",
    "    for src, targets in edge_dict.items():\n",
    "        for target in targets:\n",
    "            source_nodes.append(src)\n",
    "            target_nodes.append(target)\n",
    "    edge_index = [source_nodes, target_nodes]\n",
    "    return torch.LongTensor(edge_index)\n",
    "\n",
    "\n",
    "def numpy_array_to_edge_index(np_array):\n",
    "    assert np_array.ndim == 2 and np_array.shape[0] == np_array.shape[1], \"Input must be a square matrix.\"\n",
    "    rows, cols = np.nonzero(np_array)\n",
    "    edge_index = np.vstack((rows, cols))\n",
    "    edge_index_tensor = torch.from_numpy(edge_index).long()\n",
    "    return edge_index_tensor\n",
    "\n",
    "\n",
    "def _load_pickle(path):\n",
    "    \"\"\"Read and return one pickled object (helper for load_data).\n",
    "\n",
    "    NOTE(review): pickle.load on untrusted files can execute arbitrary code;\n",
    "    only use with trusted dataset files.\n",
    "    \"\"\"\n",
    "    with open(path, 'rb') as file:\n",
    "        return pickle.load(file)\n",
    "\n",
    "\n",
    "def _load_relation(pickle_path, matrix_prefix_path, k):\n",
    "    \"\"\"Load one relation graph plus its k decomposition-tree matrices.\n",
    "\n",
    "    :return: [edge_index, [tree_edge_index_1, ..., tree_edge_index_k]]\n",
    "    \"\"\"\n",
    "    relation = dict_to_edge_index(_load_pickle(pickle_path))\n",
    "    trees = []\n",
    "    for i in range(1, k + 1):\n",
    "        tree = _load_pickle('{}{}.pkl'.format(matrix_prefix_path, i))\n",
    "        trees.append(numpy_array_to_edge_index(tree))\n",
    "    return [relation, trees]\n",
    "\n",
    "\n",
    "def load_data(data, k=2, prefix=''):\n",
    "    \"\"\"Load features, labels and the three relation graphs of a dataset.\n",
    "\n",
    "    The original version copy-pasted the same load-relation block six times;\n",
    "    that logic now lives in _load_relation.\n",
    "\n",
    "    :param data: 'yelp' or 'amazon'\n",
    "    :param k: number of decomposition levels (tree matrices) per relation\n",
    "    :param prefix: directory containing the .mat and pickle files\n",
    "    :return: (relations, feat_data, labels) where relations is a list of\n",
    "             [edge_index, trees] pairs, one per relation\n",
    "    :raises ValueError: for an unknown dataset name (the original silently\n",
    "                        returned None)\n",
    "    \"\"\"\n",
    "    pickle_file = {}\n",
    "    matrix_prefix = {}\n",
    "    for key in filelist:\n",
    "        pickle_file[key] = os.path.join(prefix, filelist[key])\n",
    "        matrix_prefix[key] = os.path.join(prefix, file_matrix_prefix[key])\n",
    "\n",
    "    # Relation order matches the original return order for each dataset.\n",
    "    if data == 'yelp':\n",
    "        mat_name = 'YelpChi.mat'\n",
    "        rel_keys = ['yelp_rur', 'yelp_rtr', 'yelp_rsr']\n",
    "    elif data == 'amazon':\n",
    "        mat_name = 'Amazon.mat'\n",
    "        rel_keys = ['amz_upu', 'amz_usu', 'amz_uvu']\n",
    "    else:\n",
    "        raise ValueError(\"Unknown dataset: {!r} (expected 'yelp' or 'amazon')\".format(data))\n",
    "\n",
    "    data_file = loadmat(os.path.join(prefix, mat_name))\n",
    "    labels = data_file['label'].flatten()\n",
    "    feat_data = data_file['features'].todense().A\n",
    "\n",
    "    relations = [_load_relation(pickle_file[key], matrix_prefix[key], k)\n",
    "                 for key in rel_keys]\n",
    "    return relations, feat_data, labels\n",
    "\n",
    "\n",
    "def normalize(mx):\n",
    "    rowsum = np.array(mx.sum(1)) + 0.01\n",
    "    r_inv = np.power(rowsum, -1).flatten()\n",
    "    r_inv[np.isinf(r_inv)] = 0.\n",
    "    r_mat_inv = sp.diags(r_inv)\n",
    "    mx = r_mat_inv.dot(mx)\n",
    "    return mx\n",
    "\n",
    "\n",
    "def pos_neg_split(nodes, labels):\n",
    "    # 正负样本分割\n",
    "    pos_nodes = []\n",
    "    neg_nodes = cp.deepcopy(nodes)\n",
    "    aux_nodes = cp.deepcopy(nodes)\n",
    "    for idx, label in enumerate(labels):\n",
    "        if label == 1:\n",
    "            pos_nodes.append(aux_nodes[idx])\n",
    "            neg_nodes.remove(aux_nodes[idx])\n",
    "\n",
    "    return pos_nodes, neg_nodes\n",
    "\n",
    "\n",
    "def undersample(pos_nodes, neg_nodes, scale=1):\n",
    "    # 对负样本进行下采样，平衡正负样本数量\n",
    "    aux_nodes = cp.deepcopy(neg_nodes)\n",
    "    aux_nodes = rd.sample(aux_nodes, k=int(len(pos_nodes) * scale))\n",
    "    batch_nodes = pos_nodes + aux_nodes\n",
    "\n",
    "    return batch_nodes\n",
    "\n",
    "\n",
    "def calculate_g_mean(y_true, y_pred):\n",
    "    \"\"\"Geometric mean of per-class recall (sensitivity), via the confusion matrix.\"\"\"\n",
    "    cm = confusion_matrix(y_true, y_pred)\n",
    "    sensitivities = []\n",
    "    for row_idx, row in enumerate(cm):\n",
    "        true_pos = row[row_idx]\n",
    "        total = row.sum()  # TP + FN for this class\n",
    "        sensitivities.append(true_pos / total if total != 0 else 0)\n",
    "    return np.prod(sensitivities) ** (1 / len(sensitivities))\n",
    "\n",
    "\n",
    "def iterate_batches(indices, batch_size, shuffle=True):\n",
    "    \"\"\"\n",
    "        将索引列表划分为指定大小的批次\n",
    "        :param indices: 样本索引列表\n",
    "        :param batch_size: 每个批次的大小\n",
    "        :param shuffle: 是否打乱顺序\n",
    "        :return: 生成批次索引的迭代器\n",
    "    \"\"\"\n",
    "    if shuffle:\n",
    "        rd.shuffle(indices)\n",
    "    for i in range(0, len(indices), batch_size):\n",
    "        yield indices[i:i + batch_size]\n",
    "        \n",
    "def setup_logger(log_dir='./logs', log_name=None):\n",
    "    \"\"\"返回一个同时写文件与终端的 utf-8 logger\"\"\"\n",
    "    os.makedirs(log_dir, exist_ok=True)\n",
    "    if log_name is None:\n",
    "        log_name = datetime.now().strftime('%Y%m%d-%H%M%S') + '.log'\n",
    "    log_path = os.path.join(log_dir, log_name)\n",
    "\n",
    "    logger = logging.getLogger('BSNE')\n",
    "    logger.setLevel(logging.INFO)\n",
    "    # 防止重复 handler\n",
    "    if not logger.handlers:\n",
    "        # 文件 handler（utf-8）\n",
    "        fh = logging.FileHandler(log_path, encoding='utf-8')\n",
    "        fh.setLevel(logging.INFO)\n",
    "        # 终端 handler\n",
    "        ch = logging.StreamHandler(sys.stdout)\n",
    "        ch.setLevel(logging.INFO)\n",
    "        # 统一格式\n",
    "        fmt = '%(asctime)s | %(levelname)s | %(message)s'\n",
    "        formatter = logging.Formatter(fmt, datefmt='%Y-%m-%d %H:%M:%S')\n",
    "        fh.setFormatter(formatter)\n",
    "        ch.setFormatter(formatter)\n",
    "        logger.addHandler(fh)\n",
    "        logger.addHandler(ch)\n",
    "    return logger\n",
    "\n",
    "def test(idx_eval, y_eval, model, feat_data, edge_indexs, device, batch_size=64):\n",
    "    \"\"\"Evaluate the model on idx_eval and return (AUC, AP, macro-F1, G-mean).\n",
    "\n",
    "    y_eval must be aligned with idx_eval (y_eval[i] is the label of\n",
    "    idx_eval[i]). The original implementation ignored y_eval entirely and\n",
    "    read a notebook-global `labels`, which breaks on a fresh kernel.\n",
    "    \"\"\"\n",
    "    model.eval()\n",
    "    all_probs = []\n",
    "    all_labels = []\n",
    "\n",
    "    # Label lookup keyed by node index, built from the aligned y_eval.\n",
    "    label_of = {idx: y for idx, y in zip(idx_eval, y_eval)}\n",
    "\n",
    "    # Batched evaluation; shuffle=False keeps probs and labels aligned.\n",
    "    for batch_centers in iterate_batches(idx_eval, batch_size, shuffle=False):\n",
    "        # Build the one-hop subgraph of every centre node in the batch.\n",
    "        subgraph_data = [create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "                         for xi in batch_centers]\n",
    "\n",
    "        with torch.no_grad():\n",
    "            center_logits, _ = model(subgraph_data)  # [B, 2]\n",
    "            probs = torch.softmax(center_logits, dim=-1)[:, 1]  # positive-class probability\n",
    "            all_probs.extend(probs.cpu().numpy())\n",
    "            all_labels.extend(label_of[xi] for xi in batch_centers)\n",
    "\n",
    "    # Threshold at 0.5 for the hard-label metrics.\n",
    "    auc_score = roc_auc_score(all_labels, all_probs)\n",
    "    ap_score = average_precision_score(all_labels, all_probs)\n",
    "    pred_labels = (np.array(all_probs) >= 0.5).astype(int)\n",
    "    f1 = f1_score(all_labels, pred_labels, average='macro')\n",
    "    g_mean = calculate_g_mean(all_labels, pred_labels)\n",
    "\n",
    "    return auc_score, ap_score, f1, g_mean\n",
    "\n",
    "\n",
    "class BSNE_Transformer(nn.Module):\n",
    "    \"\"\"Multi-relation Transformer encoder over per-node subgraphs.\n",
    "\n",
    "    Each relation gets its own TransformerEncoder whose attention is masked\n",
    "    by that relation's adjacency; the per-relation embeddings of the centre\n",
    "    node are concatenated and classified.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, in_feat, out_feat, relation_nums=3, d_model=256,\n",
    "                 nhead=8, num_layers=3, dim_feedforward=256,\n",
    "                 drop_rate=0.5):\n",
    "        super().__init__()\n",
    "        self.relation_nums = relation_nums\n",
    "        self.d_model = d_model\n",
    "        self.nhead = nhead\n",
    "\n",
    "        # Project raw node features into the model dimension.\n",
    "        self.feature_proj = nn.Sequential(\n",
    "            nn.Linear(in_feat, d_model),\n",
    "            nn.LayerNorm(d_model),\n",
    "            nn.ReLU()\n",
    "        )\n",
    "        self.norm = nn.LayerNorm(d_model)\n",
    "\n",
    "        # One masked TransformerEncoder per relation type.\n",
    "        self.relation_encoders = nn.ModuleList([\n",
    "            TransformerEncoder(\n",
    "                TransformerEncoderLayer(\n",
    "                    d_model=d_model,\n",
    "                    nhead=nhead,\n",
    "                    dim_feedforward=dim_feedforward,\n",
    "                    dropout=drop_rate,\n",
    "                    batch_first=True\n",
    "                ),\n",
    "                num_layers=num_layers\n",
    "            ) for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        # Classifier over the concatenation of all relation embeddings.\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(relation_nums * d_model, 512),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(drop_rate),\n",
    "            nn.Linear(512, out_feat)\n",
    "        )\n",
    "        # Xavier-init every matrix-shaped parameter; vectors (biases,\n",
    "        # LayerNorm weights) are skipped. NOTE(review): this also\n",
    "        # re-initialises the Transformer encoders' own weights.\n",
    "        for p in self.parameters():\n",
    "            if p.dim() > 1:\n",
    "                nn.init.xavier_uniform_(p)\n",
    "\n",
    "    def forward(self, subgraph_batch):\n",
    "        \"\"\"Encode each subgraph per relation and classify its centre node.\n",
    "\n",
    "        :param subgraph_batch: list of dicts as produced by\n",
    "                               create_node_subgraph ('features', 'edges',\n",
    "                               'global_idx'; centre node is node 0)\n",
    "        :return: (center_logits [B, out_feat] log-probabilities,\n",
    "                  center_features [B, relation_nums * d_model])\n",
    "        \"\"\"\n",
    "        # Subgraphs have different node counts, so process them one at a time.\n",
    "        center_logits_list = []\n",
    "        center_features_list = []\n",
    "\n",
    "        for sg in subgraph_batch:\n",
    "            # Project this subgraph's node features.\n",
    "            features = self.feature_proj(sg['features'].unsqueeze(0))  # [1, num_nodes, d_model]\n",
    "            features = self.norm(features)\n",
    "\n",
    "            # Encode the subgraph once per relation type.\n",
    "            rel_outputs = []\n",
    "            num_nodes = features.size(1)\n",
    "\n",
    "            for rel_idx in range(self.relation_nums):\n",
    "                # Dense adjacency of the current relation (local indices).\n",
    "                edge_index = sg['edges'][rel_idx]\n",
    "                adj = torch.zeros(num_nodes, num_nodes,\n",
    "                                  dtype=torch.float, device=features.device)\n",
    "\n",
    "                if edge_index.size(1) > 0:\n",
    "                    src, dst = edge_index\n",
    "                    adj[src, dst] = 1.0\n",
    "\n",
    "                # Self-loops so every node can attend to itself.\n",
    "                adj[range(num_nodes), range(num_nodes)] = 1.0\n",
    "\n",
    "                # Additive attention mask: 0 where an edge exists, -inf elsewhere.\n",
    "                adj_mask = adj.masked_fill(adj == 0.0, float('-inf'))\n",
    "                adj_mask = adj_mask.masked_fill(adj == 1.0, 0.0)\n",
    "\n",
    "                # Expand to one mask per attention head: [nhead, N, N].\n",
    "                adj_mask = adj_mask.unsqueeze(0).unsqueeze(0)  # [1, 1, num_nodes, num_nodes]\n",
    "                adj_mask = adj_mask.expand(1, self.nhead, num_nodes, num_nodes)\n",
    "                adj_mask = adj_mask.reshape(-1, num_nodes, num_nodes)\n",
    "\n",
    "                # Relation-specific encoding with the masked encoder.\n",
    "                encoder_output = self.relation_encoders[rel_idx](\n",
    "                    src=features,\n",
    "                    mask=adj_mask\n",
    "                )\n",
    "                rel_outputs.append(encoder_output)\n",
    "\n",
    "            # Concatenate the per-relation embeddings.\n",
    "            combined = torch.cat(rel_outputs, dim=-1)  # [1, num_nodes, rel*d_model]\n",
    "\n",
    "            # The centre node is always the first node of the subgraph.\n",
    "            center_features = combined[:, 0, :]  # [1, rel*d_model]\n",
    "            center_logits = self.classifier(center_features)  # [1, out_feat]\n",
    "            # Emit log-probabilities (softmax downstream is shift-invariant,\n",
    "            # so probabilities derived from these are unaffected).\n",
    "            center_logits = F.log_softmax(center_logits, dim=-1)\n",
    "\n",
    "            center_logits_list.append(center_logits)\n",
    "            center_features_list.append(center_features)\n",
    "\n",
    "        # Stack the per-subgraph results into a batch.\n",
    "        center_logits = torch.cat(center_logits_list, dim=0)\n",
    "        center_features = torch.cat(center_features_list, dim=0)\n",
    "\n",
    "        return center_logits, center_features\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "15650663",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 15:40:47 | INFO | ============  BSNE Training  ============\n",
      "2025-07-23 15:40:47 | INFO | Args:\n",
      "{\n",
      "  \"dataset\": \"amazon\",\n",
      "  \"weight_decay\": 5e-05,\n",
      "  \"seed\": 76,\n",
      "  \"pretrain_epochs\": 10,\n",
      "  \"max_steps\": 100,\n",
      "  \"sample_size\": 100,\n",
      "  \"loss_threshold\": 0.1,\n",
      "  \"pretrain_lr\": 0.0015,\n",
      "  \"finetune_lr\": 0.0005,\n",
      "  \"batch_size\": 8,\n",
      "  \"num_epochs\": 80,\n",
      "  \"patience\": 30,\n",
      "  \"weight\": 0.6,\n",
      "  \"test_size\": 0.3,\n",
      "  \"val_size\": 0.5,\n",
      "  \"layers_tree\": 7,\n",
      "  \"num_heads\": 4,\n",
      "  \"num_layers\": 2,\n",
      "  \"drop_rate\": 0.5\n",
      "}\n",
      "2025-07-23 15:40:47 | INFO | cuda\n",
      "2025-07-23 15:40:47 | INFO | loading data...\n",
      "2025-07-23 15:41:33 | INFO | \n",
      "=== Starting Pretraining ===\n",
      "2025-07-23 15:41:33 | INFO | \n",
      "=== Pretraining Epoch 0 (Center Node: 268) ===\n",
      "2025-07-23 15:42:31 | INFO | Step 0: local_loss: 3.3976118564605713 | global_loss: 2.958376407623291 | total_loss: 6.355988502502441\n",
      "2025-07-23 15:43:30 | INFO | Step 1: local_loss: 3.7514121532440186 | global_loss: 4.49116849899292 | total_loss: 8.24258041381836\n",
      "2025-07-23 15:44:29 | INFO | Step 2: local_loss: 3.305243492126465 | global_loss: 3.448019027709961 | total_loss: 6.753262519836426\n",
      "2025-07-23 15:45:13 | INFO | Step 3: local_loss: 3.32609224319458 | global_loss: 3.532951831817627 | total_loss: 6.859044075012207\n",
      "2025-07-23 15:46:11 | INFO | Step 4: local_loss: 3.545893430709839 | global_loss: 3.4388039112091064 | total_loss: 6.984697341918945\n",
      "2025-07-23 15:46:55 | INFO | Step 5: local_loss: 3.470038890838623 | global_loss: 3.047813653945923 | total_loss: 6.517852783203125\n",
      "2025-07-23 15:47:53 | INFO | Step 6: local_loss: 3.3708715438842773 | global_loss: 2.803372621536255 | total_loss: 6.174243927001953\n",
      "2025-07-23 15:48:53 | INFO | Step 7: local_loss: 3.2045891284942627 | global_loss: 3.176023006439209 | total_loss: 6.380612373352051\n",
      "2025-07-23 15:49:51 | INFO | Step 8: local_loss: 3.0785648822784424 | global_loss: 3.624314308166504 | total_loss: 6.702878952026367\n",
      "2025-07-23 15:50:36 | INFO | Step 9: local_loss: 3.547133445739746 | global_loss: 3.262030839920044 | total_loss: 6.809164047241211\n",
      "2025-07-23 15:51:20 | INFO | Step 10: local_loss: 3.589249610900879 | global_loss: 2.7019076347351074 | total_loss: 6.291157245635986\n",
      "2025-07-23 15:52:18 | INFO | Step 11: local_loss: 3.3018765449523926 | global_loss: 2.8140571117401123 | total_loss: 6.115933418273926\n",
      "2025-07-23 15:53:18 | INFO | Step 12: local_loss: 3.3358378410339355 | global_loss: 3.2090847492218018 | total_loss: 6.544922828674316\n",
      "2025-07-23 15:54:16 | INFO | Step 13: local_loss: 3.4097371101379395 | global_loss: 3.06600284576416 | total_loss: 6.4757399559021\n",
      "2025-07-23 15:55:00 | INFO | Step 14: local_loss: 3.31318998336792 | global_loss: 2.9017434120178223 | total_loss: 6.214933395385742\n",
      "2025-07-23 15:55:58 | INFO | Step 15: local_loss: 3.414649486541748 | global_loss: 2.897878408432007 | total_loss: 6.312527656555176\n",
      "2025-07-23 15:56:58 | INFO | Step 16: local_loss: 3.129115581512451 | global_loss: 3.154306411743164 | total_loss: 6.283421993255615\n",
      "2025-07-23 15:57:56 | INFO | Step 17: local_loss: 3.783651113510132 | global_loss: 3.179750442504883 | total_loss: 6.963401794433594\n",
      "2025-07-23 15:58:55 | INFO | Step 18: local_loss: 3.313977003097534 | global_loss: 3.1224257946014404 | total_loss: 6.436402797698975\n",
      "2025-07-23 15:59:39 | INFO | Step 19: local_loss: 3.287209987640381 | global_loss: 3.7711024284362793 | total_loss: 7.05831241607666\n",
      "2025-07-23 16:00:37 | INFO | Step 20: local_loss: 3.6459453105926514 | global_loss: 3.1998331546783447 | total_loss: 6.845778465270996\n",
      "2025-07-23 16:01:36 | INFO | Step 21: local_loss: 3.190378427505493 | global_loss: 2.8852789402008057 | total_loss: 6.075657367706299\n",
      "2025-07-23 16:02:20 | INFO | Step 22: local_loss: 3.299184560775757 | global_loss: 4.0826029777526855 | total_loss: 7.381787300109863\n",
      "2025-07-23 16:03:14 | INFO | Step 23: local_loss: 3.6260974407196045 | global_loss: 2.7754435539245605 | total_loss: 6.401540756225586\n",
      "2025-07-23 16:04:15 | INFO | Step 24: local_loss: 3.6629528999328613 | global_loss: 3.1752004623413086 | total_loss: 6.83815336227417\n",
      "2025-07-23 16:05:13 | INFO | Step 25: local_loss: 3.468613624572754 | global_loss: 3.6557743549346924 | total_loss: 7.124387741088867\n",
      "2025-07-23 16:06:12 | INFO | Step 26: local_loss: 3.6629981994628906 | global_loss: 2.9366273880004883 | total_loss: 6.599625587463379\n",
      "2025-07-23 16:07:11 | INFO | Step 27: local_loss: 3.165966749191284 | global_loss: 3.1222620010375977 | total_loss: 6.288228988647461\n",
      "2025-07-23 16:08:10 | INFO | Step 28: local_loss: 3.441293954849243 | global_loss: 2.3363230228424072 | total_loss: 5.77761697769165\n",
      "2025-07-23 16:09:11 | INFO | Step 29: local_loss: 3.0449907779693604 | global_loss: 2.4984660148620605 | total_loss: 5.54345703125\n",
      "2025-07-23 16:10:11 | INFO | Step 30: local_loss: 3.2646431922912598 | global_loss: 3.0069494247436523 | total_loss: 6.271592617034912\n",
      "2025-07-23 16:11:11 | INFO | Step 31: local_loss: 3.465517282485962 | global_loss: 3.821441650390625 | total_loss: 7.286958694458008\n",
      "2025-07-23 16:12:10 | INFO | Step 32: local_loss: 3.499096632003784 | global_loss: 2.8353381156921387 | total_loss: 6.334434509277344\n",
      "2025-07-23 16:13:08 | INFO | Step 33: local_loss: 3.7218282222747803 | global_loss: 3.214857816696167 | total_loss: 6.936686038970947\n",
      "2025-07-23 16:14:08 | INFO | Step 34: local_loss: 3.250433921813965 | global_loss: 2.1998379230499268 | total_loss: 5.4502716064453125\n",
      "2025-07-23 16:15:08 | INFO | Step 35: local_loss: 3.504153251647949 | global_loss: 3.2639899253845215 | total_loss: 6.768143177032471\n",
      "2025-07-23 16:16:07 | INFO | Step 36: local_loss: 3.6076812744140625 | global_loss: 2.9537246227264404 | total_loss: 6.561406135559082\n",
      "2025-07-23 16:17:07 | INFO | Step 37: local_loss: 3.627692461013794 | global_loss: 2.651670217514038 | total_loss: 6.279362678527832\n",
      "2025-07-23 16:18:07 | INFO | Step 38: local_loss: 3.6543872356414795 | global_loss: 2.4667019844055176 | total_loss: 6.121088981628418\n",
      "2025-07-23 16:19:06 | INFO | Step 39: local_loss: 3.5837326049804688 | global_loss: 2.7903497219085693 | total_loss: 6.374082565307617\n",
      "2025-07-23 16:20:04 | INFO | Step 40: local_loss: 3.3482682704925537 | global_loss: 2.252943277359009 | total_loss: 5.6012115478515625\n",
      "2025-07-23 16:21:04 | INFO | Step 41: local_loss: 3.875004291534424 | global_loss: 3.6788477897644043 | total_loss: 7.553852081298828\n",
      "2025-07-23 16:22:02 | INFO | Step 42: local_loss: 3.416315793991089 | global_loss: 2.1627678871154785 | total_loss: 5.579083442687988\n",
      "2025-07-23 16:22:47 | INFO | Step 43: local_loss: 3.3836796283721924 | global_loss: 2.903705358505249 | total_loss: 6.287384986877441\n",
      "2025-07-23 16:23:44 | INFO | Step 44: local_loss: 3.3795597553253174 | global_loss: 2.7781240940093994 | total_loss: 6.157683849334717\n",
      "2025-07-23 16:24:44 | INFO | Step 45: local_loss: 2.894139289855957 | global_loss: 3.2321536540985107 | total_loss: 6.126293182373047\n",
      "2025-07-23 16:25:43 | INFO | Step 46: local_loss: 3.4164748191833496 | global_loss: 3.3078598976135254 | total_loss: 6.724334716796875\n",
      "2025-07-23 16:26:42 | INFO | Step 47: local_loss: 3.5130882263183594 | global_loss: 2.8449158668518066 | total_loss: 6.358004093170166\n",
      "2025-07-23 16:27:40 | INFO | Step 48: local_loss: 2.8139801025390625 | global_loss: 2.7038729190826416 | total_loss: 5.517852783203125\n",
      "2025-07-23 16:28:38 | INFO | Step 49: local_loss: 2.8496062755584717 | global_loss: 2.9605233669281006 | total_loss: 5.810129642486572\n",
      "2025-07-23 16:29:23 | INFO | Step 50: local_loss: 3.3554577827453613 | global_loss: 2.378448486328125 | total_loss: 5.733906269073486\n",
      "2025-07-23 16:30:21 | INFO | Step 51: local_loss: 3.3903753757476807 | global_loss: 2.635340690612793 | total_loss: 6.0257158279418945\n",
      "2025-07-23 16:31:20 | INFO | Step 52: local_loss: 3.24143123626709 | global_loss: 2.453474760055542 | total_loss: 5.694906234741211\n",
      "2025-07-23 16:32:19 | INFO | Step 53: local_loss: 3.0905168056488037 | global_loss: 2.3565123081207275 | total_loss: 5.447029113769531\n",
      "2025-07-23 16:33:18 | INFO | Step 54: local_loss: 3.6915714740753174 | global_loss: 3.097085475921631 | total_loss: 6.788657188415527\n",
      "2025-07-23 16:34:17 | INFO | Step 55: local_loss: 3.3430724143981934 | global_loss: 2.667154312133789 | total_loss: 6.010226726531982\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 16:35:17 | INFO | Step 56: local_loss: 3.15338397026062 | global_loss: 2.3039979934692383 | total_loss: 5.4573822021484375\n",
      "2025-07-23 16:36:17 | INFO | Step 57: local_loss: 2.829644203186035 | global_loss: 2.2616066932678223 | total_loss: 5.091250896453857\n",
      "2025-07-23 16:37:16 | INFO | Step 58: local_loss: 3.301300048828125 | global_loss: 2.3755953311920166 | total_loss: 5.6768951416015625\n",
      "2025-07-23 16:38:15 | INFO | Step 59: local_loss: 2.9023094177246094 | global_loss: 2.377863883972168 | total_loss: 5.280173301696777\n",
      "2025-07-23 16:38:59 | INFO | Step 60: local_loss: 3.360898971557617 | global_loss: 2.866398572921753 | total_loss: 6.227297782897949\n",
      "2025-07-23 16:39:56 | INFO | Step 61: local_loss: 3.155566453933716 | global_loss: 2.390122175216675 | total_loss: 5.545688629150391\n",
      "2025-07-23 16:40:55 | INFO | Step 62: local_loss: 2.6368401050567627 | global_loss: 3.0113043785095215 | total_loss: 5.648144721984863\n",
      "2025-07-23 16:41:53 | INFO | Step 63: local_loss: 3.6476969718933105 | global_loss: 3.095822811126709 | total_loss: 6.7435197830200195\n",
      "2025-07-23 16:42:38 | INFO | Step 64: local_loss: 3.2039783000946045 | global_loss: 2.7187561988830566 | total_loss: 5.922734260559082\n",
      "2025-07-23 16:43:35 | INFO | Step 65: local_loss: 3.600053548812866 | global_loss: 1.5732849836349487 | total_loss: 5.173338413238525\n",
      "2025-07-23 16:44:35 | INFO | Step 66: local_loss: 3.277409553527832 | global_loss: 2.361937999725342 | total_loss: 5.639347553253174\n",
      "2025-07-23 16:45:36 | INFO | Step 67: local_loss: 3.6521623134613037 | global_loss: 1.9129077196121216 | total_loss: 5.565070152282715\n",
      "2025-07-23 16:46:35 | INFO | Step 68: local_loss: 2.8334567546844482 | global_loss: 1.7285667657852173 | total_loss: 4.562023639678955\n",
      "2025-07-23 16:47:35 | INFO | Step 69: local_loss: 3.13739013671875 | global_loss: 2.8073134422302246 | total_loss: 5.944703578948975\n",
      "2025-07-23 16:48:34 | INFO | Step 70: local_loss: 2.630720853805542 | global_loss: 3.094805955886841 | total_loss: 5.725526809692383\n",
      "2025-07-23 16:49:33 | INFO | Step 71: local_loss: 2.9938697814941406 | global_loss: 2.2146031856536865 | total_loss: 5.208473205566406\n",
      "2025-07-23 16:50:17 | INFO | Step 72: local_loss: 2.9324724674224854 | global_loss: 2.0912907123565674 | total_loss: 5.023763179779053\n",
      "2025-07-23 16:51:15 | INFO | Step 73: local_loss: 3.178954601287842 | global_loss: 2.1566319465637207 | total_loss: 5.3355865478515625\n",
      "2025-07-23 16:52:14 | INFO | Step 74: local_loss: 3.3018486499786377 | global_loss: 2.658277750015259 | total_loss: 5.9601263999938965\n",
      "2025-07-23 16:53:11 | INFO | Step 75: local_loss: 2.9836387634277344 | global_loss: 1.8122152090072632 | total_loss: 4.795854091644287\n",
      "2025-07-23 16:54:09 | INFO | Step 76: local_loss: 2.896097421646118 | global_loss: 1.964163064956665 | total_loss: 4.860260486602783\n",
      "2025-07-23 16:55:07 | INFO | Step 77: local_loss: 3.321059465408325 | global_loss: 2.2706432342529297 | total_loss: 5.591702461242676\n",
      "2025-07-23 16:56:04 | INFO | Step 78: local_loss: 3.0620410442352295 | global_loss: 2.6050870418548584 | total_loss: 5.667128086090088\n",
      "2025-07-23 16:57:02 | INFO | Step 79: local_loss: 3.314866065979004 | global_loss: 2.271479606628418 | total_loss: 5.586345672607422\n",
      "2025-07-23 16:57:59 | INFO | Step 80: local_loss: 3.007715940475464 | global_loss: 2.781162977218628 | total_loss: 5.788878917694092\n",
      "2025-07-23 16:58:57 | INFO | Step 81: local_loss: 3.0784530639648438 | global_loss: 2.5176315307617188 | total_loss: 5.5960845947265625\n",
      "2025-07-23 16:59:55 | INFO | Step 82: local_loss: 2.8485286235809326 | global_loss: 2.7892189025878906 | total_loss: 5.637747764587402\n",
      "2025-07-23 17:00:52 | INFO | Step 83: local_loss: 2.8420674800872803 | global_loss: 1.9691412448883057 | total_loss: 4.811208724975586\n",
      "2025-07-23 17:01:48 | INFO | Step 84: local_loss: 2.9163262844085693 | global_loss: 2.438016653060913 | total_loss: 5.354342937469482\n",
      "2025-07-23 17:02:45 | INFO | Step 85: local_loss: 3.2438530921936035 | global_loss: 1.0817774534225464 | total_loss: 4.3256306648254395\n",
      "2025-07-23 17:03:43 | INFO | Step 86: local_loss: 2.8618714809417725 | global_loss: 2.4655821323394775 | total_loss: 5.32745361328125\n",
      "2025-07-23 17:04:39 | INFO | Step 87: local_loss: 2.9354357719421387 | global_loss: 2.4408624172210693 | total_loss: 5.376297950744629\n",
      "2025-07-23 17:05:35 | INFO | Step 88: local_loss: 3.292196273803711 | global_loss: 3.285482883453369 | total_loss: 6.57767915725708\n",
      "2025-07-23 17:06:31 | INFO | Step 89: local_loss: 3.2414073944091797 | global_loss: 2.0232396125793457 | total_loss: 5.264647006988525\n",
      "2025-07-23 17:07:15 | INFO | Step 90: local_loss: 3.431169033050537 | global_loss: 2.0302274227142334 | total_loss: 5.461396217346191\n",
      "2025-07-23 17:08:11 | INFO | Step 91: local_loss: 3.342494010925293 | global_loss: 2.6060407161712646 | total_loss: 5.948534965515137\n",
      "2025-07-23 17:09:08 | INFO | Step 92: local_loss: 2.842855215072632 | global_loss: 1.6708965301513672 | total_loss: 4.513751983642578\n",
      "2025-07-23 17:10:05 | INFO | Step 93: local_loss: 3.14675235748291 | global_loss: 1.9856144189834595 | total_loss: 5.13236665725708\n",
      "2025-07-23 17:11:03 | INFO | Step 94: local_loss: 3.1780362129211426 | global_loss: 1.5758442878723145 | total_loss: 4.753880500793457\n",
      "2025-07-23 17:12:01 | INFO | Step 95: local_loss: 3.256283760070801 | global_loss: 1.7014962434768677 | total_loss: 4.957779884338379\n",
      "2025-07-23 17:12:59 | INFO | Step 96: local_loss: 3.095906972885132 | global_loss: 1.5530731678009033 | total_loss: 4.648980140686035\n",
      "2025-07-23 17:13:57 | INFO | Step 97: local_loss: 3.044745922088623 | global_loss: 2.0868430137634277 | total_loss: 5.131588935852051\n",
      "2025-07-23 17:14:55 | INFO | Step 98: local_loss: 2.485995054244995 | global_loss: 2.261794328689575 | total_loss: 4.74778938293457\n",
      "2025-07-23 17:15:54 | INFO | Step 99: local_loss: 2.6646478176116943 | global_loss: 1.855879545211792 | total_loss: 4.520527362823486\n",
      "2025-07-23 17:15:54 | INFO | Center node 268 finished at step 100 with loss 4.5205\n",
      "2025-07-23 17:15:54 | INFO | \n",
      "=== Pretraining Epoch 1 (Center Node: 7572) ===\n",
      "2025-07-23 17:16:51 | INFO | Step 0: local_loss: 2.628711462020874 | global_loss: 2.636270046234131 | total_loss: 5.264981269836426\n",
      "2025-07-23 17:17:36 | INFO | Step 1: local_loss: 2.669494867324829 | global_loss: 2.5529861450195312 | total_loss: 5.222480773925781\n",
      "2025-07-23 17:18:35 | INFO | Step 2: local_loss: 2.8329455852508545 | global_loss: 3.008448600769043 | total_loss: 5.841394424438477\n",
      "2025-07-23 17:19:34 | INFO | Step 3: local_loss: 2.9210259914398193 | global_loss: 1.9103822708129883 | total_loss: 4.831408500671387\n",
      "2025-07-23 17:20:33 | INFO | Step 4: local_loss: 2.7659502029418945 | global_loss: 1.870794653892517 | total_loss: 4.636744976043701\n",
      "2025-07-23 17:21:33 | INFO | Step 5: local_loss: 2.4830822944641113 | global_loss: 2.1451170444488525 | total_loss: 4.628199577331543\n",
      "2025-07-23 17:22:33 | INFO | Step 6: local_loss: 2.3204898834228516 | global_loss: 2.3596253395080566 | total_loss: 4.680115222930908\n",
      "2025-07-23 17:23:32 | INFO | Step 7: local_loss: 2.044893980026245 | global_loss: 2.237931966781616 | total_loss: 4.282825946807861\n",
      "2025-07-23 17:24:32 | INFO | Step 8: local_loss: 2.4741408824920654 | global_loss: 1.9886071681976318 | total_loss: 4.462748050689697\n",
      "2025-07-23 17:25:31 | INFO | Step 9: local_loss: 2.4673118591308594 | global_loss: 1.7332066297531128 | total_loss: 4.200518608093262\n",
      "2025-07-23 17:26:30 | INFO | Step 10: local_loss: 2.427629232406616 | global_loss: 2.76731014251709 | total_loss: 5.194939613342285\n",
      "2025-07-23 17:27:30 | INFO | Step 11: local_loss: 2.3642942905426025 | global_loss: 2.07205867767334 | total_loss: 4.436352729797363\n",
      "2025-07-23 17:28:29 | INFO | Step 12: local_loss: 2.5821850299835205 | global_loss: 1.452968716621399 | total_loss: 4.035153865814209\n",
      "2025-07-23 17:29:28 | INFO | Step 13: local_loss: 2.3243348598480225 | global_loss: 2.10428786277771 | total_loss: 4.428622722625732\n",
      "2025-07-23 17:30:27 | INFO | Step 14: local_loss: 2.5209741592407227 | global_loss: 2.4798359870910645 | total_loss: 5.000810146331787\n",
      "2025-07-23 17:31:27 | INFO | Step 15: local_loss: 2.639409065246582 | global_loss: 1.9893049001693726 | total_loss: 4.628714084625244\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 17:32:27 | INFO | Step 16: local_loss: 2.4450700283050537 | global_loss: 2.9120874404907227 | total_loss: 5.3571577072143555\n",
      "2025-07-23 17:33:26 | INFO | Step 17: local_loss: 2.34066104888916 | global_loss: 2.096808910369873 | total_loss: 4.437469959259033\n",
      "2025-07-23 17:34:24 | INFO | Step 18: local_loss: 2.2654194831848145 | global_loss: 1.6710968017578125 | total_loss: 3.936516284942627\n",
      "2025-07-23 17:35:23 | INFO | Step 19: local_loss: 2.078925848007202 | global_loss: 2.4352235794067383 | total_loss: 4.5141496658325195\n",
      "2025-07-23 17:36:24 | INFO | Step 20: local_loss: 2.319340705871582 | global_loss: 1.4738234281539917 | total_loss: 3.7931642532348633\n",
      "2025-07-23 17:37:22 | INFO | Step 21: local_loss: 1.9568346738815308 | global_loss: 1.2017576694488525 | total_loss: 3.1585922241210938\n",
      "2025-07-23 17:38:24 | INFO | Step 22: local_loss: 2.4847161769866943 | global_loss: 2.1760406494140625 | total_loss: 4.660757064819336\n",
      "2025-07-23 17:39:24 | INFO | Step 23: local_loss: 2.475365400314331 | global_loss: 1.807978868484497 | total_loss: 4.283344268798828\n",
      "2025-07-23 17:40:22 | INFO | Step 24: local_loss: 2.491476058959961 | global_loss: 1.7147289514541626 | total_loss: 4.206204891204834\n",
      "2025-07-23 17:41:20 | INFO | Step 25: local_loss: 2.2101945877075195 | global_loss: 1.3410826921463013 | total_loss: 3.5512771606445312\n",
      "2025-07-23 17:42:19 | INFO | Step 26: local_loss: 2.4664056301116943 | global_loss: 1.4726977348327637 | total_loss: 3.939103364944458\n",
      "2025-07-23 17:43:19 | INFO | Step 27: local_loss: 2.176400899887085 | global_loss: 1.263304352760315 | total_loss: 3.4397053718566895\n",
      "2025-07-23 17:44:19 | INFO | Step 28: local_loss: 1.9566326141357422 | global_loss: 1.6071200370788574 | total_loss: 3.5637526512145996\n",
      "2025-07-23 17:45:18 | INFO | Step 29: local_loss: 2.154555559158325 | global_loss: 2.3353829383850098 | total_loss: 4.489938735961914\n",
      "2025-07-23 17:46:18 | INFO | Step 30: local_loss: 1.9167683124542236 | global_loss: 1.8167059421539307 | total_loss: 3.7334742546081543\n",
      "2025-07-23 17:47:02 | INFO | Step 31: local_loss: 1.8936179876327515 | global_loss: 2.0791666507720947 | total_loss: 3.9727845191955566\n",
      "2025-07-23 17:49:59 | INFO | Step 34: local_loss: 1.8560088872909546 | global_loss: 2.029750347137451 | total_loss: 3.8857593536376953\n",
      "2025-07-23 17:50:59 | INFO | Step 35: local_loss: 2.338989496231079 | global_loss: 1.872417688369751 | total_loss: 4.21140718460083\n",
      "2025-07-23 17:51:59 | INFO | Step 36: local_loss: 2.169870615005493 | global_loss: 1.6666520833969116 | total_loss: 3.8365225791931152\n",
      "2025-07-23 17:52:59 | INFO | Step 37: local_loss: 2.1964962482452393 | global_loss: 1.6999398469924927 | total_loss: 3.8964362144470215\n",
      "2025-07-23 17:53:58 | INFO | Step 38: local_loss: 1.919708490371704 | global_loss: 1.6754491329193115 | total_loss: 3.5951576232910156\n",
      "2025-07-23 17:54:58 | INFO | Step 39: local_loss: 2.136180877685547 | global_loss: 0.9315085411071777 | total_loss: 3.0676894187927246\n",
      "2025-07-23 17:55:58 | INFO | Step 40: local_loss: 2.2912356853485107 | global_loss: 1.4605653285980225 | total_loss: 3.751801013946533\n",
      "2025-07-23 17:56:58 | INFO | Step 41: local_loss: 2.006370782852173 | global_loss: 1.4330236911773682 | total_loss: 3.439394474029541\n",
      "2025-07-23 17:57:57 | INFO | Step 42: local_loss: 2.560140609741211 | global_loss: 1.02170729637146 | total_loss: 3.581847906112671\n",
      "2025-07-23 17:58:56 | INFO | Step 43: local_loss: 2.403820037841797 | global_loss: 0.8190838694572449 | total_loss: 3.2229039669036865\n",
      "2025-07-23 17:59:53 | INFO | Step 44: local_loss: 2.316528797149658 | global_loss: 1.3707304000854492 | total_loss: 3.6872591972351074\n",
      "2025-07-23 18:00:38 | INFO | Step 45: local_loss: 2.0872762203216553 | global_loss: 2.043370246887207 | total_loss: 4.130646705627441\n",
      "2025-07-23 18:01:37 | INFO | Step 46: local_loss: 2.1686880588531494 | global_loss: 1.3945788145065308 | total_loss: 3.5632667541503906\n",
      "2025-07-23 18:02:37 | INFO | Step 47: local_loss: 1.9314111471176147 | global_loss: 1.9837217330932617 | total_loss: 3.915132999420166\n",
      "2025-07-23 18:03:34 | INFO | Step 48: local_loss: 1.5763829946517944 | global_loss: 0.9967706203460693 | total_loss: 2.573153495788574\n",
      "2025-07-23 18:04:34 | INFO | Step 49: local_loss: 2.2075603008270264 | global_loss: 1.4120838642120361 | total_loss: 3.6196441650390625\n",
      "2025-07-23 18:05:33 | INFO | Step 50: local_loss: 1.8113633394241333 | global_loss: 2.1968295574188232 | total_loss: 4.008193016052246\n",
      "2025-07-23 18:06:33 | INFO | Step 51: local_loss: 2.0305614471435547 | global_loss: 1.9342752695083618 | total_loss: 3.964836597442627\n",
      "2025-07-23 18:07:31 | INFO | Step 52: local_loss: 2.270744562149048 | global_loss: 1.3796333074569702 | total_loss: 3.6503777503967285\n",
      "2025-07-23 18:08:29 | INFO | Step 53: local_loss: 2.0322318077087402 | global_loss: 1.240684151649475 | total_loss: 3.272915840148926\n",
      "2025-07-23 18:09:28 | INFO | Step 54: local_loss: 2.010223865509033 | global_loss: 1.5067222118377686 | total_loss: 3.5169460773468018\n",
      "2025-07-23 18:10:26 | INFO | Step 55: local_loss: 2.045750856399536 | global_loss: 1.5335530042648315 | total_loss: 3.579303741455078\n",
      "2025-07-23 18:11:25 | INFO | Step 56: local_loss: 1.813455581665039 | global_loss: 1.3573365211486816 | total_loss: 3.1707921028137207\n",
      "2025-07-23 18:12:22 | INFO | Step 57: local_loss: 1.943041205406189 | global_loss: 0.957222580909729 | total_loss: 2.900263786315918\n",
      "2025-07-23 18:13:06 | INFO | Step 58: local_loss: 2.212186336517334 | global_loss: 1.323099136352539 | total_loss: 3.535285472869873\n",
      "2025-07-23 18:14:02 | INFO | Step 59: local_loss: 2.1217994689941406 | global_loss: 2.232952117919922 | total_loss: 4.3547515869140625\n",
      "2025-07-23 18:15:02 | INFO | Step 60: local_loss: 2.0912322998046875 | global_loss: 0.796188473701477 | total_loss: 2.887420654296875\n",
      "2025-07-23 18:16:02 | INFO | Step 61: local_loss: 2.1668379306793213 | global_loss: 0.8503400683403015 | total_loss: 3.0171780586242676\n",
      "2025-07-23 18:17:00 | INFO | Step 62: local_loss: 2.17647385597229 | global_loss: 0.6097413301467896 | total_loss: 2.786215305328369\n",
      "2025-07-23 18:18:00 | INFO | Step 63: local_loss: 1.9503165483474731 | global_loss: 0.7847488522529602 | total_loss: 2.735065460205078\n",
      "2025-07-23 18:18:59 | INFO | Step 64: local_loss: 2.276040554046631 | global_loss: 0.6946647763252258 | total_loss: 2.970705270767212\n",
      "2025-07-23 18:19:58 | INFO | Step 65: local_loss: 2.807224750518799 | global_loss: 0.9720596671104431 | total_loss: 3.7792844772338867\n",
      "2025-07-23 18:20:58 | INFO | Step 66: local_loss: 1.9116278886795044 | global_loss: 0.9420259594917297 | total_loss: 2.853653907775879\n",
      "2025-07-23 18:21:58 | INFO | Step 67: local_loss: 1.905038833618164 | global_loss: 0.6208692193031311 | total_loss: 2.5259079933166504\n",
      "2025-07-23 18:22:57 | INFO | Step 68: local_loss: 2.5446181297302246 | global_loss: 0.9484031200408936 | total_loss: 3.493021249771118\n",
      "2025-07-23 18:23:41 | INFO | Step 69: local_loss: 1.9094233512878418 | global_loss: 0.7032816410064697 | total_loss: 2.6127049922943115\n",
      "2025-07-23 18:24:42 | INFO | Step 70: local_loss: 2.0616307258605957 | global_loss: 0.8685150742530823 | total_loss: 2.930145740509033\n",
      "2025-07-23 18:25:41 | INFO | Step 71: local_loss: 1.910691499710083 | global_loss: 1.1875770092010498 | total_loss: 3.098268508911133\n",
      "2025-07-23 18:26:40 | INFO | Step 72: local_loss: 1.9712804555892944 | global_loss: 0.9646982550621033 | total_loss: 2.935978651046753\n",
      "2025-07-23 18:27:40 | INFO | Step 73: local_loss: 2.3136987686157227 | global_loss: 0.582269549369812 | total_loss: 2.895968437194824\n",
      "2025-07-23 18:28:38 | INFO | Step 74: local_loss: 1.7637284994125366 | global_loss: 1.3734374046325684 | total_loss: 3.1371660232543945\n",
      "2025-07-23 18:29:37 | INFO | Step 75: local_loss: 1.812056541442871 | global_loss: 1.0293725728988647 | total_loss: 2.8414292335510254\n",
      "2025-07-23 18:30:35 | INFO | Step 76: local_loss: 2.047109365463257 | global_loss: 1.2865961790084839 | total_loss: 3.333705425262451\n",
      "2025-07-23 18:31:34 | INFO | Step 77: local_loss: 2.082955837249756 | global_loss: 0.39825350046157837 | total_loss: 2.4812092781066895\n",
      "2025-07-23 18:32:33 | INFO | Step 78: local_loss: 1.8472529649734497 | global_loss: 1.6370654106140137 | total_loss: 3.484318256378174\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 18:33:34 | INFO | Step 79: local_loss: 1.94422447681427 | global_loss: 1.297013759613037 | total_loss: 3.2412381172180176\n",
      "2025-07-23 18:34:34 | INFO | Step 80: local_loss: 1.9436770677566528 | global_loss: 0.7712817192077637 | total_loss: 2.714958667755127\n",
      "2025-07-23 18:35:34 | INFO | Step 81: local_loss: 1.938585638999939 | global_loss: 0.8510512709617615 | total_loss: 2.7896368503570557\n",
      "2025-07-23 18:36:33 | INFO | Step 82: local_loss: 2.8828125 | global_loss: 0.2004348784685135 | total_loss: 3.083247423171997\n",
      "2025-07-23 18:37:33 | INFO | Step 83: local_loss: 1.7742871046066284 | global_loss: 1.2638866901397705 | total_loss: 3.0381736755371094\n",
      "2025-07-23 18:38:17 | INFO | Step 84: local_loss: 2.2514255046844482 | global_loss: 0.8083290457725525 | total_loss: 3.0597546100616455\n",
      "2025-07-23 18:39:16 | INFO | Step 85: local_loss: 1.9006980657577515 | global_loss: 1.054789423942566 | total_loss: 2.9554874897003174\n",
      "2025-07-23 18:40:15 | INFO | Step 86: local_loss: 1.4394999742507935 | global_loss: 0.5066155195236206 | total_loss: 1.946115493774414\n",
      "2025-07-23 18:41:13 | INFO | Step 87: local_loss: 2.2537028789520264 | global_loss: 1.6018884181976318 | total_loss: 3.855591297149658\n",
      "2025-07-23 18:42:13 | INFO | Step 88: local_loss: 1.8849931955337524 | global_loss: 0.580828070640564 | total_loss: 2.4658212661743164\n",
      "2025-07-23 18:43:14 | INFO | Step 89: local_loss: 1.9753997325897217 | global_loss: 0.7671849131584167 | total_loss: 2.742584705352783\n",
      "2025-07-23 18:44:13 | INFO | Step 90: local_loss: 2.142042875289917 | global_loss: 0.7352912425994873 | total_loss: 2.8773341178894043\n",
      "2025-07-23 18:45:13 | INFO | Step 91: local_loss: 1.9797613620758057 | global_loss: 0.9365944266319275 | total_loss: 2.916355848312378\n",
      "2025-07-23 18:46:12 | INFO | Step 92: local_loss: 2.045497179031372 | global_loss: 0.8119093179702759 | total_loss: 2.8574066162109375\n",
      "2025-07-23 18:47:12 | INFO | Step 93: local_loss: 2.5353622436523438 | global_loss: 0.4260565936565399 | total_loss: 2.961418867111206\n",
      "2025-07-23 18:48:12 | INFO | Step 94: local_loss: 1.6670947074890137 | global_loss: 1.0352604389190674 | total_loss: 2.702355146408081\n",
      "2025-07-23 18:49:12 | INFO | Step 95: local_loss: 1.7118000984191895 | global_loss: 0.8337530493736267 | total_loss: 2.545553207397461\n",
      "2025-07-23 18:50:10 | INFO | Step 96: local_loss: 1.7129513025283813 | global_loss: 0.8224226236343384 | total_loss: 2.5353739261627197\n",
      "2025-07-23 18:51:10 | INFO | Step 97: local_loss: 1.8041305541992188 | global_loss: 1.0355695486068726 | total_loss: 2.839700222015381\n",
      "2025-07-23 18:52:09 | INFO | Step 98: local_loss: 1.9881794452667236 | global_loss: 0.681654691696167 | total_loss: 2.6698341369628906\n",
      "2025-07-23 18:53:07 | INFO | Step 99: local_loss: 1.9851479530334473 | global_loss: 0.7409681677818298 | total_loss: 2.726116180419922\n",
      "2025-07-23 18:53:07 | INFO | Center node 7572 finished at step 100 with loss 2.7261\n",
      "2025-07-23 18:53:07 | INFO | \n",
      "=== Pretraining Epoch 2 (Center Node: 7964) ===\n",
      "2025-07-23 18:54:05 | INFO | Step 0: local_loss: 2.2073400020599365 | global_loss: 4.457853317260742 | total_loss: 6.665193557739258\n",
      "2025-07-23 18:55:04 | INFO | Step 1: local_loss: 2.46726131439209 | global_loss: 2.3802926540374756 | total_loss: 4.8475542068481445\n",
      "2025-07-23 18:56:02 | INFO | Step 2: local_loss: 2.3512048721313477 | global_loss: 1.7843900918960571 | total_loss: 4.135594844818115\n",
      "2025-07-23 18:57:00 | INFO | Step 3: local_loss: 1.9593154191970825 | global_loss: 1.5330113172531128 | total_loss: 3.4923267364501953\n",
      "2025-07-23 18:57:58 | INFO | Step 4: local_loss: 2.230435848236084 | global_loss: 2.45444655418396 | total_loss: 4.684882164001465\n",
      "2025-07-23 18:58:57 | INFO | Step 5: local_loss: 1.821516513824463 | global_loss: 3.5780203342437744 | total_loss: 5.399537086486816\n",
      "2025-07-23 18:59:54 | INFO | Step 6: local_loss: 2.277222156524658 | global_loss: 2.095224380493164 | total_loss: 4.372446537017822\n",
      "2025-07-23 19:00:53 | INFO | Step 7: local_loss: 2.0798051357269287 | global_loss: 2.1507813930511475 | total_loss: 4.230586528778076\n",
      "2025-07-23 19:01:50 | INFO | Step 8: local_loss: 1.980204463005066 | global_loss: 2.817295551300049 | total_loss: 4.797500133514404\n",
      "2025-07-23 19:02:49 | INFO | Step 9: local_loss: 1.846919059753418 | global_loss: 2.1663520336151123 | total_loss: 4.013271331787109\n",
      "2025-07-23 19:03:48 | INFO | Step 10: local_loss: 2.276318073272705 | global_loss: 1.9271742105484009 | total_loss: 4.203492164611816\n",
      "2025-07-23 19:04:45 | INFO | Step 11: local_loss: 1.7105965614318848 | global_loss: 2.964721918106079 | total_loss: 4.675318717956543\n",
      "2025-07-23 19:05:43 | INFO | Step 12: local_loss: 1.8775765895843506 | global_loss: 1.9286625385284424 | total_loss: 3.806239128112793\n",
      "2025-07-23 19:06:41 | INFO | Step 13: local_loss: 2.315131187438965 | global_loss: 2.226870536804199 | total_loss: 4.542001724243164\n",
      "2025-07-23 19:07:40 | INFO | Step 14: local_loss: 1.9621468782424927 | global_loss: 2.377638816833496 | total_loss: 4.339785575866699\n",
      "2025-07-23 19:08:38 | INFO | Step 15: local_loss: 2.0056703090667725 | global_loss: 2.053586721420288 | total_loss: 4.0592570304870605\n",
      "2025-07-23 19:09:34 | INFO | Step 16: local_loss: 2.060096502304077 | global_loss: 1.6672320365905762 | total_loss: 3.7273285388946533\n",
      "2025-07-23 19:10:32 | INFO | Step 17: local_loss: 2.0064289569854736 | global_loss: 2.424677848815918 | total_loss: 4.4311065673828125\n",
      "2025-07-23 19:11:30 | INFO | Step 18: local_loss: 1.7516636848449707 | global_loss: 1.487051010131836 | total_loss: 3.2387146949768066\n",
      "2025-07-23 19:12:15 | INFO | Step 19: local_loss: 1.8119351863861084 | global_loss: 2.1937129497528076 | total_loss: 4.005648136138916\n",
      "2025-07-23 19:13:13 | INFO | Step 20: local_loss: 2.0154247283935547 | global_loss: 1.5537806749343872 | total_loss: 3.5692052841186523\n",
      "2025-07-23 19:13:58 | INFO | Step 21: local_loss: 1.8677371740341187 | global_loss: 1.142441987991333 | total_loss: 3.010179042816162\n",
      "2025-07-23 19:14:57 | INFO | Step 22: local_loss: 2.3350017070770264 | global_loss: 1.5004831552505493 | total_loss: 3.8354849815368652\n",
      "2025-07-23 19:15:56 | INFO | Step 23: local_loss: 2.1024298667907715 | global_loss: 2.1351349353790283 | total_loss: 4.237565040588379\n",
      "2025-07-23 19:16:55 | INFO | Step 24: local_loss: 1.8435205221176147 | global_loss: 2.259320020675659 | total_loss: 4.102840423583984\n",
      "2025-07-23 19:17:54 | INFO | Step 25: local_loss: 2.4503157138824463 | global_loss: 0.8247754573822021 | total_loss: 3.2750911712646484\n",
      "2025-07-23 19:18:52 | INFO | Step 26: local_loss: 2.1862523555755615 | global_loss: 1.1854900121688843 | total_loss: 3.3717422485351562\n",
      "2025-07-23 19:19:51 | INFO | Step 27: local_loss: 1.6620851755142212 | global_loss: 2.789608955383301 | total_loss: 4.451694011688232\n",
      "2025-07-23 19:20:49 | INFO | Step 28: local_loss: 2.505160093307495 | global_loss: 0.8509951829910278 | total_loss: 3.3561553955078125\n",
      "2025-07-23 19:21:48 | INFO | Step 29: local_loss: 1.6962699890136719 | global_loss: 1.8831367492675781 | total_loss: 3.57940673828125\n",
      "2025-07-23 19:22:46 | INFO | Step 30: local_loss: 1.7156200408935547 | global_loss: 1.0791081190109253 | total_loss: 2.7947282791137695\n",
      "2025-07-23 19:23:44 | INFO | Step 31: local_loss: 1.9139169454574585 | global_loss: 1.4829703569412231 | total_loss: 3.3968873023986816\n",
      "2025-07-23 19:24:43 | INFO | Step 32: local_loss: 1.977596640586853 | global_loss: 1.8372632265090942 | total_loss: 3.8148598670959473\n",
      "2025-07-23 19:25:41 | INFO | Step 33: local_loss: 1.5782197713851929 | global_loss: 1.6352711915969849 | total_loss: 3.2134909629821777\n",
      "2025-07-23 19:26:40 | INFO | Step 34: local_loss: 1.587260127067566 | global_loss: 1.8750383853912354 | total_loss: 3.4622983932495117\n",
      "2025-07-23 19:27:41 | INFO | Step 35: local_loss: 1.7596005201339722 | global_loss: 1.708938479423523 | total_loss: 3.468538999557495\n",
      "2025-07-23 19:28:39 | INFO | Step 36: local_loss: 2.082752227783203 | global_loss: 2.196959972381592 | total_loss: 4.279712200164795\n",
      "2025-07-23 19:29:38 | INFO | Step 37: local_loss: 1.504873275756836 | global_loss: 2.304276943206787 | total_loss: 3.809150218963623\n",
      "2025-07-23 19:30:37 | INFO | Step 38: local_loss: 1.9857561588287354 | global_loss: 1.7924363613128662 | total_loss: 3.7781925201416016\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 19:31:22 | INFO | Step 39: local_loss: 2.165379762649536 | global_loss: 2.0661509037017822 | total_loss: 4.231530666351318\n",
      "2025-07-23 19:32:21 | INFO | Step 40: local_loss: 2.0176613330841064 | global_loss: 0.741580605506897 | total_loss: 2.759242057800293\n",
      "2025-07-23 19:33:19 | INFO | Step 41: local_loss: 1.8399693965911865 | global_loss: 1.3165738582611084 | total_loss: 3.156543254852295\n",
      "2025-07-23 19:34:17 | INFO | Step 42: local_loss: 2.002777576446533 | global_loss: 1.4097431898117065 | total_loss: 3.4125208854675293\n",
      "2025-07-23 19:35:16 | INFO | Step 43: local_loss: 1.7693358659744263 | global_loss: 1.3548988103866577 | total_loss: 3.124234676361084\n",
      "2025-07-23 19:36:16 | INFO | Step 44: local_loss: 2.1160497665405273 | global_loss: 1.341356635093689 | total_loss: 3.457406520843506\n",
      "2025-07-23 19:37:13 | INFO | Step 45: local_loss: 1.5401809215545654 | global_loss: 2.334418535232544 | total_loss: 3.8745994567871094\n",
      "2025-07-23 19:38:12 | INFO | Step 46: local_loss: 1.4311796426773071 | global_loss: 2.160564422607422 | total_loss: 3.5917439460754395\n",
      "2025-07-23 19:39:09 | INFO | Step 47: local_loss: 1.7772274017333984 | global_loss: 1.179017424583435 | total_loss: 2.956244945526123\n",
      "2025-07-23 19:39:54 | INFO | Step 48: local_loss: 1.9890927076339722 | global_loss: 2.2320337295532227 | total_loss: 4.221126556396484\n",
      "2025-07-23 19:40:51 | INFO | Step 49: local_loss: 2.182722330093384 | global_loss: 1.5089448690414429 | total_loss: 3.691667079925537\n",
      "2025-07-23 19:41:51 | INFO | Step 50: local_loss: 1.9757781028747559 | global_loss: 2.0042202472686768 | total_loss: 3.9799983501434326\n",
      "2025-07-23 19:42:48 | INFO | Step 51: local_loss: 1.7092318534851074 | global_loss: 1.684998631477356 | total_loss: 3.394230365753174\n",
      "2025-07-23 19:43:44 | INFO | Step 52: local_loss: 2.0638654232025146 | global_loss: 1.1244360208511353 | total_loss: 3.1883015632629395\n",
      "2025-07-23 19:44:41 | INFO | Step 53: local_loss: 1.9216620922088623 | global_loss: 1.7908921241760254 | total_loss: 3.7125542163848877\n",
      "2025-07-23 19:45:37 | INFO | Step 54: local_loss: 1.466199278831482 | global_loss: 1.5749118328094482 | total_loss: 3.0411109924316406\n",
      "2025-07-23 19:46:34 | INFO | Step 55: local_loss: 1.8087224960327148 | global_loss: 1.6073436737060547 | total_loss: 3.4160661697387695\n",
      "2025-07-23 19:47:32 | INFO | Step 56: local_loss: 1.5759152173995972 | global_loss: 1.333351969718933 | total_loss: 2.9092671871185303\n",
      "2025-07-23 19:48:17 | INFO | Step 57: local_loss: 1.7354310750961304 | global_loss: 2.020094871520996 | total_loss: 3.755526065826416\n",
      "2025-07-23 19:49:14 | INFO | Step 58: local_loss: 1.44686758518219 | global_loss: 1.4343745708465576 | total_loss: 2.881242275238037\n",
      "2025-07-23 19:50:13 | INFO | Step 59: local_loss: 1.9964945316314697 | global_loss: 1.0507820844650269 | total_loss: 3.047276496887207\n",
      "2025-07-23 19:51:12 | INFO | Step 60: local_loss: 1.5804423093795776 | global_loss: 1.2163416147232056 | total_loss: 2.796783924102783\n",
      "2025-07-23 19:52:10 | INFO | Step 61: local_loss: 1.5542765855789185 | global_loss: 1.6449848413467407 | total_loss: 3.199261426925659\n",
      "2025-07-23 19:53:08 | INFO | Step 62: local_loss: 1.9784948825836182 | global_loss: 0.8773495554924011 | total_loss: 2.855844497680664\n",
      "2025-07-23 19:53:53 | INFO | Step 63: local_loss: 1.774851679801941 | global_loss: 1.1027833223342896 | total_loss: 2.8776350021362305\n",
      "2025-07-23 19:54:50 | INFO | Step 64: local_loss: 2.165415048599243 | global_loss: 0.5541491508483887 | total_loss: 2.719564199447632\n",
      "2025-07-23 19:55:50 | INFO | Step 65: local_loss: 1.8252052068710327 | global_loss: 1.2381657361984253 | total_loss: 3.063370943069458\n",
      "2025-07-23 19:56:50 | INFO | Step 66: local_loss: 1.7773809432983398 | global_loss: 1.0392519235610962 | total_loss: 2.8166327476501465\n",
      "2025-07-23 19:57:49 | INFO | Step 67: local_loss: 1.9083727598190308 | global_loss: 0.8724274039268494 | total_loss: 2.7808001041412354\n",
      "2025-07-23 19:58:47 | INFO | Step 68: local_loss: 2.2850358486175537 | global_loss: 0.5900204181671143 | total_loss: 2.875056266784668\n",
      "2025-07-23 19:59:46 | INFO | Step 69: local_loss: 2.64760160446167 | global_loss: 0.017381291836500168 | total_loss: 2.664982795715332\n",
      "2025-07-23 20:00:44 | INFO | Step 70: local_loss: 2.176086664199829 | global_loss: 0.5624211430549622 | total_loss: 2.7385077476501465\n",
      "2025-07-23 20:01:44 | INFO | Step 71: local_loss: 1.8264296054840088 | global_loss: 1.258040428161621 | total_loss: 3.08447003364563\n",
      "2025-07-23 20:02:29 | INFO | Step 72: local_loss: 1.646787166595459 | global_loss: 1.645592212677002 | total_loss: 3.292379379272461\n",
      "2025-07-23 20:03:27 | INFO | Step 73: local_loss: 2.532755136489868 | global_loss: 0.3329019844532013 | total_loss: 2.865657091140747\n",
      "2025-07-23 20:04:28 | INFO | Step 74: local_loss: 1.8989510536193848 | global_loss: 0.6888007521629333 | total_loss: 2.587751865386963\n",
      "2025-07-23 20:05:26 | INFO | Step 75: local_loss: 1.631335973739624 | global_loss: 1.4588508605957031 | total_loss: 3.090186834335327\n",
      "2025-07-23 20:06:25 | INFO | Step 76: local_loss: 2.627142906188965 | global_loss: 1.9423412084579468 | total_loss: 4.569484233856201\n",
      "2025-07-23 20:07:23 | INFO | Step 77: local_loss: 1.8840869665145874 | global_loss: 1.014086365699768 | total_loss: 2.8981733322143555\n",
      "2025-07-23 20:08:22 | INFO | Step 78: local_loss: 2.0768885612487793 | global_loss: 1.0014081001281738 | total_loss: 3.078296661376953\n",
      "2025-07-23 20:09:22 | INFO | Step 79: local_loss: 1.6459304094314575 | global_loss: 0.6982823610305786 | total_loss: 2.344212770462036\n",
      "2025-07-23 20:10:23 | INFO | Step 80: local_loss: 2.089128017425537 | global_loss: 0.3059178292751312 | total_loss: 2.395045757293701\n",
      "2025-07-23 20:11:22 | INFO | Step 81: local_loss: 1.7674460411071777 | global_loss: 1.1750203371047974 | total_loss: 2.9424662590026855\n",
      "2025-07-23 20:12:23 | INFO | Step 82: local_loss: 1.7317757606506348 | global_loss: 0.8470014929771423 | total_loss: 2.578777313232422\n",
      "2025-07-23 20:13:24 | INFO | Step 83: local_loss: 1.61104154586792 | global_loss: 1.2605820894241333 | total_loss: 2.8716235160827637\n",
      "2025-07-23 20:14:09 | INFO | Step 84: local_loss: 1.8117482662200928 | global_loss: 1.081609845161438 | total_loss: 2.8933582305908203\n",
      "2025-07-23 20:15:08 | INFO | Step 85: local_loss: 1.4644320011138916 | global_loss: 2.329349994659424 | total_loss: 3.7937819957733154\n",
      "2025-07-23 20:16:07 | INFO | Step 86: local_loss: 2.1229867935180664 | global_loss: 1.9616261720657349 | total_loss: 4.084612846374512\n",
      "2025-07-23 20:17:05 | INFO | Step 87: local_loss: 1.8160613775253296 | global_loss: 0.7821791768074036 | total_loss: 2.598240613937378\n",
      "2025-07-23 20:18:05 | INFO | Step 88: local_loss: 1.7555506229400635 | global_loss: 0.8647224307060242 | total_loss: 2.6202731132507324\n",
      "2025-07-23 20:19:03 | INFO | Step 89: local_loss: 1.8333094120025635 | global_loss: 0.6319867372512817 | total_loss: 2.4652962684631348\n",
      "2025-07-23 20:20:02 | INFO | Step 90: local_loss: 1.5377004146575928 | global_loss: 1.8486589193344116 | total_loss: 3.386359214782715\n",
      "2025-07-23 20:21:02 | INFO | Step 91: local_loss: 1.7979047298431396 | global_loss: 1.2417535781860352 | total_loss: 3.039658308029175\n",
      "2025-07-23 20:22:01 | INFO | Step 92: local_loss: 1.8405990600585938 | global_loss: 0.7949446439743042 | total_loss: 2.6355438232421875\n",
      "2025-07-23 20:23:00 | INFO | Step 93: local_loss: 1.7978272438049316 | global_loss: 1.2504897117614746 | total_loss: 3.0483169555664062\n",
      "2025-07-23 20:24:00 | INFO | Step 94: local_loss: 1.546749234199524 | global_loss: 1.9610852003097534 | total_loss: 3.5078344345092773\n",
      "2025-07-23 20:24:57 | INFO | Step 95: local_loss: 1.718437910079956 | global_loss: 1.1847445964813232 | total_loss: 2.9031825065612793\n",
      "2025-07-23 20:25:56 | INFO | Step 96: local_loss: 2.3879683017730713 | global_loss: 0.5682619214057922 | total_loss: 2.9562301635742188\n",
      "2025-07-23 20:26:53 | INFO | Step 97: local_loss: 2.044113874435425 | global_loss: 1.2024046182632446 | total_loss: 3.246518611907959\n",
      "2025-07-23 20:27:52 | INFO | Step 98: local_loss: 1.9195492267608643 | global_loss: 1.1776803731918335 | total_loss: 3.097229480743408\n",
      "2025-07-23 20:28:50 | INFO | Step 99: local_loss: 1.4373061656951904 | global_loss: 1.453065037727356 | total_loss: 2.890371322631836\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 20:28:51 | INFO | Center node 7964 finished at step 100 with loss 2.8904\n",
      "2025-07-23 20:28:51 | INFO | \n",
      "=== Pretraining Epoch 3 (Center Node: 7312) ===\n",
      "2025-07-23 20:29:48 | INFO | Step 0: local_loss: 2.611265182495117 | global_loss: 1.887574553489685 | total_loss: 4.498839855194092\n",
      "2025-07-23 20:30:48 | INFO | Step 1: local_loss: 2.5629594326019287 | global_loss: 1.6281580924987793 | total_loss: 4.191117286682129\n",
      "2025-07-23 20:31:32 | INFO | Step 2: local_loss: 2.628385543823242 | global_loss: 1.8282350301742554 | total_loss: 4.456620693206787\n",
      "2025-07-23 20:32:30 | INFO | Step 3: local_loss: 2.6913435459136963 | global_loss: 1.7240105867385864 | total_loss: 4.415354251861572\n",
      "2025-07-23 20:33:29 | INFO | Step 4: local_loss: 3.2010984420776367 | global_loss: 1.56014883518219 | total_loss: 4.761247158050537\n",
      "2025-07-23 20:34:27 | INFO | Step 5: local_loss: 2.24709415435791 | global_loss: 2.4083123207092285 | total_loss: 4.655406475067139\n",
      "2025-07-23 20:35:26 | INFO | Step 6: local_loss: 2.3606767654418945 | global_loss: 1.4934829473495483 | total_loss: 3.8541598320007324\n",
      "2025-07-23 20:36:25 | INFO | Step 7: local_loss: 2.882575511932373 | global_loss: 2.0358521938323975 | total_loss: 4.918427467346191\n",
      "2025-07-23 20:37:23 | INFO | Step 8: local_loss: 2.422567129135132 | global_loss: 1.5030635595321655 | total_loss: 3.925630569458008\n",
      "2025-07-23 20:38:21 | INFO | Step 9: local_loss: 2.8990252017974854 | global_loss: 1.397924542427063 | total_loss: 4.296949863433838\n",
      "2025-07-23 20:39:20 | INFO | Step 10: local_loss: 2.7932939529418945 | global_loss: 1.6979807615280151 | total_loss: 4.491274833679199\n",
      "2025-07-23 20:40:19 | INFO | Step 11: local_loss: 2.2358484268188477 | global_loss: 2.012289524078369 | total_loss: 4.248137950897217\n",
      "2025-07-23 20:41:17 | INFO | Step 12: local_loss: 2.423456907272339 | global_loss: 1.4893041849136353 | total_loss: 3.9127612113952637\n",
      "2025-07-23 20:42:17 | INFO | Step 13: local_loss: 2.5215976238250732 | global_loss: 1.3980026245117188 | total_loss: 3.919600248336792\n",
      "2025-07-23 20:43:14 | INFO | Step 14: local_loss: 2.5105886459350586 | global_loss: 2.051354169845581 | total_loss: 4.561943054199219\n",
      "2025-07-23 20:44:12 | INFO | Step 15: local_loss: 2.032479763031006 | global_loss: 1.55784273147583 | total_loss: 3.590322494506836\n",
      "2025-07-23 20:45:10 | INFO | Step 16: local_loss: 2.2799437046051025 | global_loss: 1.4220951795578003 | total_loss: 3.7020387649536133\n",
      "2025-07-23 20:46:09 | INFO | Step 17: local_loss: 2.3092217445373535 | global_loss: 1.697556972503662 | total_loss: 4.006778717041016\n",
      "2025-07-23 20:47:06 | INFO | Step 18: local_loss: 2.3962082862854004 | global_loss: 1.6762824058532715 | total_loss: 4.072490692138672\n",
      "2025-07-23 20:48:04 | INFO | Step 19: local_loss: 2.4010121822357178 | global_loss: 1.8539021015167236 | total_loss: 4.254914283752441\n",
      "2025-07-23 20:49:03 | INFO | Step 20: local_loss: 2.0351500511169434 | global_loss: 1.9084010124206543 | total_loss: 3.9435510635375977\n",
      "2025-07-23 20:50:02 | INFO | Step 21: local_loss: 2.190776824951172 | global_loss: 1.8475922346115112 | total_loss: 4.038369178771973\n",
      "2025-07-23 20:51:00 | INFO | Step 22: local_loss: 2.546161413192749 | global_loss: 1.7719030380249023 | total_loss: 4.3180646896362305\n",
      "2025-07-23 20:52:00 | INFO | Step 23: local_loss: 2.2211787700653076 | global_loss: 1.410853624343872 | total_loss: 3.6320323944091797\n",
      "2025-07-23 20:52:59 | INFO | Step 24: local_loss: 2.623213768005371 | global_loss: 1.1415290832519531 | total_loss: 3.764742851257324\n",
      "2025-07-23 20:53:57 | INFO | Step 25: local_loss: 2.5422627925872803 | global_loss: 2.563973903656006 | total_loss: 5.106236457824707\n",
      "2025-07-23 20:54:55 | INFO | Step 26: local_loss: 1.8311916589736938 | global_loss: 1.745166301727295 | total_loss: 3.576357841491699\n",
      "2025-07-23 20:55:54 | INFO | Step 27: local_loss: 2.2064170837402344 | global_loss: 1.3212535381317139 | total_loss: 3.5276706218719482\n",
      "2025-07-23 20:56:52 | INFO | Step 28: local_loss: 2.0550928115844727 | global_loss: 1.6887664794921875 | total_loss: 3.74385929107666\n",
      "2025-07-23 20:57:52 | INFO | Step 29: local_loss: 1.831835150718689 | global_loss: 2.1722166538238525 | total_loss: 4.004051685333252\n",
      "2025-07-23 20:58:50 | INFO | Step 30: local_loss: 1.9574735164642334 | global_loss: 1.0571588277816772 | total_loss: 3.014632225036621\n",
      "2025-07-23 20:59:48 | INFO | Step 31: local_loss: 2.0686566829681396 | global_loss: 1.432391881942749 | total_loss: 3.5010485649108887\n",
      "2025-07-23 21:00:33 | INFO | Step 32: local_loss: 2.498624801635742 | global_loss: 0.580473005771637 | total_loss: 3.0790977478027344\n",
      "2025-07-23 21:01:33 | INFO | Step 33: local_loss: 1.9102376699447632 | global_loss: 1.3283123970031738 | total_loss: 3.2385501861572266\n",
      "2025-07-23 21:02:32 | INFO | Step 34: local_loss: 1.8509631156921387 | global_loss: 1.7613312005996704 | total_loss: 3.6122941970825195\n",
      "2025-07-23 21:03:32 | INFO | Step 35: local_loss: 2.034158945083618 | global_loss: 1.6198807954788208 | total_loss: 3.6540398597717285\n",
      "2025-07-23 21:04:31 | INFO | Step 36: local_loss: 2.4703989028930664 | global_loss: 1.493179440498352 | total_loss: 3.963578224182129\n",
      "2025-07-23 21:05:29 | INFO | Step 37: local_loss: 2.479367971420288 | global_loss: 0.9159584641456604 | total_loss: 3.3953263759613037\n",
      "2025-07-23 21:06:29 | INFO | Step 38: local_loss: 2.2477688789367676 | global_loss: 1.2817802429199219 | total_loss: 3.5295491218566895\n",
      "2025-07-23 21:07:27 | INFO | Step 39: local_loss: 2.21332049369812 | global_loss: 0.9733827114105225 | total_loss: 3.1867032051086426\n",
      "2025-07-23 21:08:26 | INFO | Step 40: local_loss: 2.9633543491363525 | global_loss: 1.1646804809570312 | total_loss: 4.128034591674805\n",
      "2025-07-23 21:09:10 | INFO | Step 41: local_loss: 2.0686473846435547 | global_loss: 1.7326658964157104 | total_loss: 3.8013134002685547\n",
      "2025-07-23 21:10:05 | INFO | Step 42: local_loss: 2.152449607849121 | global_loss: 0.9532415270805359 | total_loss: 3.1056911945343018\n",
      "2025-07-23 21:11:05 | INFO | Step 43: local_loss: 2.408034563064575 | global_loss: 1.033583402633667 | total_loss: 3.441617965698242\n",
      "2025-07-23 21:12:04 | INFO | Step 44: local_loss: 2.0494399070739746 | global_loss: 1.5444772243499756 | total_loss: 3.59391713142395\n",
      "2025-07-23 21:13:02 | INFO | Step 45: local_loss: 2.2211360931396484 | global_loss: 0.9739547371864319 | total_loss: 3.1950907707214355\n",
      "2025-07-23 21:14:00 | INFO | Step 46: local_loss: 2.4536705017089844 | global_loss: 1.3748241662979126 | total_loss: 3.8284945487976074\n",
      "2025-07-23 21:14:58 | INFO | Step 47: local_loss: 1.8944342136383057 | global_loss: 1.3948262929916382 | total_loss: 3.2892603874206543\n",
      "2025-07-23 21:15:55 | INFO | Step 48: local_loss: 2.0776195526123047 | global_loss: 0.9957975149154663 | total_loss: 3.0734171867370605\n",
      "2025-07-23 21:16:55 | INFO | Step 49: local_loss: 1.816597580909729 | global_loss: 1.1474932432174683 | total_loss: 2.9640908241271973\n",
      "2025-07-23 21:17:53 | INFO | Step 50: local_loss: 1.9793627262115479 | global_loss: 1.722415566444397 | total_loss: 3.7017784118652344\n",
      "2025-07-23 21:18:51 | INFO | Step 51: local_loss: 2.3200502395629883 | global_loss: 0.6033056974411011 | total_loss: 2.923356056213379\n",
      "2025-07-23 21:19:49 | INFO | Step 52: local_loss: 1.85051429271698 | global_loss: 1.021966814994812 | total_loss: 2.872481107711792\n",
      "2025-07-23 21:20:48 | INFO | Step 53: local_loss: 1.8253127336502075 | global_loss: 0.8952832818031311 | total_loss: 2.7205960750579834\n",
      "2025-07-23 21:21:47 | INFO | Step 54: local_loss: 2.078481912612915 | global_loss: 1.3172075748443604 | total_loss: 3.3956894874572754\n",
      "2025-07-23 21:22:46 | INFO | Step 55: local_loss: 2.200817108154297 | global_loss: 0.9536855816841125 | total_loss: 3.1545026302337646\n",
      "2025-07-23 21:23:43 | INFO | Step 56: local_loss: 2.1877644062042236 | global_loss: 1.5606224536895752 | total_loss: 3.748386859893799\n",
      "2025-07-23 21:24:42 | INFO | Step 57: local_loss: 1.9745162725448608 | global_loss: 1.0391510725021362 | total_loss: 3.013667345046997\n",
      "2025-07-23 21:25:40 | INFO | Step 58: local_loss: 2.3321526050567627 | global_loss: 0.37699228525161743 | total_loss: 2.7091448307037354\n",
      "2025-07-23 21:26:37 | INFO | Step 59: local_loss: 2.8874359130859375 | global_loss: 0.7013680934906006 | total_loss: 3.588804006576538\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 21:27:35 | INFO | Step 60: local_loss: 2.3961610794067383 | global_loss: 0.42796728014945984 | total_loss: 2.8241283893585205\n",
      "2025-07-23 21:28:33 | INFO | Step 61: local_loss: 2.085479974746704 | global_loss: 0.6366825699806213 | total_loss: 2.7221624851226807\n",
      "2025-07-23 21:29:28 | INFO | Step 62: local_loss: 2.2700893878936768 | global_loss: 1.1342428922653198 | total_loss: 3.404332160949707\n",
      "2025-07-23 21:30:12 | INFO | Step 63: local_loss: 1.8778499364852905 | global_loss: 1.2558941841125488 | total_loss: 3.133744239807129\n",
      "2025-07-23 21:31:09 | INFO | Step 64: local_loss: 1.8808120489120483 | global_loss: 0.8081212639808655 | total_loss: 2.6889333724975586\n",
      "2025-07-23 21:32:07 | INFO | Step 65: local_loss: 2.04594087600708 | global_loss: 1.1947804689407349 | total_loss: 3.2407212257385254\n",
      "2025-07-23 21:33:06 | INFO | Step 66: local_loss: 2.090834856033325 | global_loss: 1.6042970418930054 | total_loss: 3.695131778717041\n",
      "2025-07-23 21:33:50 | INFO | Step 67: local_loss: 1.9976314306259155 | global_loss: 1.2227367162704468 | total_loss: 3.2203681468963623\n",
      "2025-07-23 21:35:48 | INFO | Step 69: local_loss: 1.8094477653503418 | global_loss: 1.152884602546692 | total_loss: 2.962332248687744\n",
      "2025-07-23 21:36:46 | INFO | Step 70: local_loss: 2.0283749103546143 | global_loss: 0.5198293328285217 | total_loss: 2.548204183578491\n",
      "2025-07-23 21:37:47 | INFO | Step 71: local_loss: 2.40213942527771 | global_loss: 0.6463786363601685 | total_loss: 3.048518180847168\n",
      "2025-07-23 21:38:45 | INFO | Step 72: local_loss: 1.971054196357727 | global_loss: 0.8717003464698792 | total_loss: 2.842754602432251\n",
      "2025-07-23 21:39:44 | INFO | Step 73: local_loss: 2.432676076889038 | global_loss: 0.4899870455265045 | total_loss: 2.9226632118225098\n",
      "2025-07-23 21:40:44 | INFO | Step 74: local_loss: 3.091090679168701 | global_loss: 0.35796087980270386 | total_loss: 3.44905161857605\n",
      "2025-07-23 21:41:41 | INFO | Step 75: local_loss: 2.1268599033355713 | global_loss: 0.1996569186449051 | total_loss: 2.32651686668396\n",
      "2025-07-23 21:42:40 | INFO | Step 76: local_loss: 1.8773760795593262 | global_loss: 0.6100357174873352 | total_loss: 2.4874117374420166\n",
      "2025-07-23 21:43:40 | INFO | Step 77: local_loss: 2.437971353530884 | global_loss: 0.5659354329109192 | total_loss: 3.003906726837158\n",
      "2025-07-23 21:44:38 | INFO | Step 78: local_loss: 3.017987012863159 | global_loss: 0.4182147681713104 | total_loss: 3.436201810836792\n",
      "2025-07-23 21:45:35 | INFO | Step 79: local_loss: 1.7284901142120361 | global_loss: 0.5425997376441956 | total_loss: 2.271089792251587\n",
      "2025-07-23 21:46:33 | INFO | Step 80: local_loss: 2.8740901947021484 | global_loss: 3.7150039672851562 | total_loss: 6.589094161987305\n",
      "2025-07-23 21:47:29 | INFO | Step 81: local_loss: 1.9669688940048218 | global_loss: 1.9803781509399414 | total_loss: 3.9473471641540527\n",
      "2025-07-23 21:48:28 | INFO | Step 82: local_loss: 1.8734859228134155 | global_loss: 1.2197803258895874 | total_loss: 3.093266248703003\n",
      "2025-07-23 21:49:28 | INFO | Step 83: local_loss: 1.8480898141860962 | global_loss: 0.8905695080757141 | total_loss: 2.738659381866455\n",
      "2025-07-23 21:50:27 | INFO | Step 84: local_loss: 2.1592507362365723 | global_loss: 0.598617434501648 | total_loss: 2.7578682899475098\n",
      "2025-07-23 21:51:25 | INFO | Step 85: local_loss: 2.044861316680908 | global_loss: 0.9214075803756714 | total_loss: 2.966269016265869\n",
      "2025-07-23 21:52:24 | INFO | Step 86: local_loss: 2.3599846363067627 | global_loss: 0.9599274396896362 | total_loss: 3.3199119567871094\n",
      "2025-07-23 21:53:23 | INFO | Step 87: local_loss: 1.9485235214233398 | global_loss: 0.9521735906600952 | total_loss: 2.9006972312927246\n",
      "2025-07-23 21:54:07 | INFO | Step 88: local_loss: 1.6917262077331543 | global_loss: 1.0106126070022583 | total_loss: 2.702338695526123\n",
      "2025-07-23 21:55:06 | INFO | Step 89: local_loss: 2.074230432510376 | global_loss: 1.0585039854049683 | total_loss: 3.1327342987060547\n",
      "2025-07-23 21:56:05 | INFO | Step 90: local_loss: 1.9750488996505737 | global_loss: 1.1504557132720947 | total_loss: 3.125504493713379\n",
      "2025-07-23 21:57:04 | INFO | Step 91: local_loss: 1.6748199462890625 | global_loss: 1.683606505393982 | total_loss: 3.358426570892334\n",
      "2025-07-23 21:58:03 | INFO | Step 92: local_loss: 1.9533629417419434 | global_loss: 1.3481864929199219 | total_loss: 3.3015494346618652\n",
      "2025-07-23 21:59:02 | INFO | Step 93: local_loss: 2.1398565769195557 | global_loss: 1.136029601097107 | total_loss: 3.275886058807373\n",
      "2025-07-23 21:59:59 | INFO | Step 94: local_loss: 1.9937375783920288 | global_loss: 1.1670124530792236 | total_loss: 3.160749912261963\n",
      "2025-07-23 22:00:59 | INFO | Step 95: local_loss: 1.9001896381378174 | global_loss: 0.9990315437316895 | total_loss: 2.899221181869507\n",
      "2025-07-23 22:01:57 | INFO | Step 96: local_loss: 1.8936495780944824 | global_loss: 1.0209475755691528 | total_loss: 2.9145970344543457\n",
      "2025-07-23 22:02:56 | INFO | Step 97: local_loss: 2.186591148376465 | global_loss: 0.5050686001777649 | total_loss: 2.691659688949585\n",
      "2025-07-23 22:03:54 | INFO | Step 98: local_loss: 2.2842137813568115 | global_loss: 0.7434479594230652 | total_loss: 3.0276618003845215\n",
      "2025-07-23 22:04:52 | INFO | Step 99: local_loss: 2.01774263381958 | global_loss: 0.6024248003959656 | total_loss: 2.6201674938201904\n",
      "2025-07-23 22:04:52 | INFO | Center node 7312 finished at step 100 with loss 2.6202\n",
      "2025-07-23 22:04:52 | INFO | \n",
      "=== Pretraining Epoch 4 (Center Node: 6714) ===\n",
      "2025-07-23 22:05:51 | INFO | Step 0: local_loss: 2.3953752517700195 | global_loss: 0.4311635494232178 | total_loss: 2.8265388011932373\n",
      "2025-07-23 22:06:51 | INFO | Step 1: local_loss: 1.2904152870178223 | global_loss: 1.3172187805175781 | total_loss: 2.6076340675354004\n",
      "2025-07-23 22:07:51 | INFO | Step 2: local_loss: 1.4257185459136963 | global_loss: 1.8226372003555298 | total_loss: 3.2483558654785156\n",
      "2025-07-23 22:08:48 | INFO | Step 3: local_loss: 1.707783818244934 | global_loss: 0.7694584131240845 | total_loss: 2.4772422313690186\n",
      "2025-07-23 22:09:47 | INFO | Step 4: local_loss: 1.4232772588729858 | global_loss: 1.8987998962402344 | total_loss: 3.3220772743225098\n",
      "2025-07-23 22:10:45 | INFO | Step 5: local_loss: 1.4555257558822632 | global_loss: 0.7542322874069214 | total_loss: 2.2097580432891846\n",
      "2025-07-23 22:11:43 | INFO | Step 6: local_loss: 1.611743688583374 | global_loss: 0.9813686609268188 | total_loss: 2.5931124687194824\n",
      "2025-07-23 22:12:43 | INFO | Step 7: local_loss: 1.550143837928772 | global_loss: 1.3412930965423584 | total_loss: 2.89143705368042\n",
      "2025-07-23 22:13:41 | INFO | Step 8: local_loss: 1.4354305267333984 | global_loss: 0.6140592694282532 | total_loss: 2.049489736557007\n",
      "2025-07-23 22:14:39 | INFO | Step 9: local_loss: 1.4686710834503174 | global_loss: 1.0105063915252686 | total_loss: 2.479177474975586\n",
      "2025-07-23 22:15:38 | INFO | Step 10: local_loss: 1.2355149984359741 | global_loss: 1.062782645225525 | total_loss: 2.298297643661499\n",
      "2025-07-23 22:16:36 | INFO | Step 11: local_loss: 1.49159574508667 | global_loss: 1.1062378883361816 | total_loss: 2.5978336334228516\n",
      "2025-07-23 22:17:35 | INFO | Step 12: local_loss: 1.227452039718628 | global_loss: 0.8388319611549377 | total_loss: 2.066283941268921\n",
      "2025-07-23 22:18:34 | INFO | Step 13: local_loss: 1.349761962890625 | global_loss: 1.5716273784637451 | total_loss: 2.92138934135437\n",
      "2025-07-23 22:19:34 | INFO | Step 14: local_loss: 1.7282335758209229 | global_loss: 0.7615264058113098 | total_loss: 2.489759922027588\n",
      "2025-07-23 22:20:31 | INFO | Step 15: local_loss: 1.4305330514907837 | global_loss: 0.5029585957527161 | total_loss: 1.9334917068481445\n",
      "2025-07-23 22:21:30 | INFO | Step 16: local_loss: 1.4394047260284424 | global_loss: 0.4984208941459656 | total_loss: 1.9378256797790527\n",
      "2025-07-23 22:22:30 | INFO | Step 17: local_loss: 1.643801212310791 | global_loss: 0.6587061285972595 | total_loss: 2.3025074005126953\n",
      "2025-07-23 22:23:30 | INFO | Step 18: local_loss: 1.4949036836624146 | global_loss: 0.8321502208709717 | total_loss: 2.327054023742676\n",
      "2025-07-23 22:24:29 | INFO | Step 19: local_loss: 1.2400870323181152 | global_loss: 0.7765774130821228 | total_loss: 2.016664505004883\n",
      "2025-07-23 22:25:28 | INFO | Step 20: local_loss: 1.1310075521469116 | global_loss: 1.079875111579895 | total_loss: 2.2108826637268066\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 22:26:28 | INFO | Step 21: local_loss: 1.5375361442565918 | global_loss: 0.6428546905517578 | total_loss: 2.1803908348083496\n",
      "2025-07-23 22:27:26 | INFO | Step 22: local_loss: 1.1536415815353394 | global_loss: 1.0553619861602783 | total_loss: 2.209003448486328\n",
      "2025-07-23 22:28:25 | INFO | Step 23: local_loss: 1.2780853509902954 | global_loss: 1.076452612876892 | total_loss: 2.3545379638671875\n",
      "2025-07-23 22:29:25 | INFO | Step 24: local_loss: 1.2481471300125122 | global_loss: 0.6607335209846497 | total_loss: 1.9088807106018066\n",
      "2025-07-23 22:30:24 | INFO | Step 25: local_loss: 1.2638461589813232 | global_loss: 0.9126112461090088 | total_loss: 2.176457405090332\n",
      "2025-07-23 22:31:23 | INFO | Step 26: local_loss: 1.1045663356781006 | global_loss: 0.8092784881591797 | total_loss: 1.9138448238372803\n",
      "2025-07-23 22:32:23 | INFO | Step 27: local_loss: 1.3481931686401367 | global_loss: 0.45239385962486267 | total_loss: 1.8005870580673218\n",
      "2025-07-23 22:33:22 | INFO | Step 28: local_loss: 1.0427730083465576 | global_loss: 0.6157645583152771 | total_loss: 1.6585376262664795\n",
      "2025-07-23 22:34:21 | INFO | Step 29: local_loss: 1.6013931035995483 | global_loss: 0.7223644852638245 | total_loss: 2.3237576484680176\n",
      "2025-07-23 22:35:19 | INFO | Step 30: local_loss: 1.3778736591339111 | global_loss: 0.6386486291885376 | total_loss: 2.0165224075317383\n",
      "2025-07-23 22:36:17 | INFO | Step 31: local_loss: 1.1254041194915771 | global_loss: 0.5375730991363525 | total_loss: 1.6629772186279297\n",
      "2025-07-23 22:37:15 | INFO | Step 32: local_loss: 1.5916012525558472 | global_loss: 0.6620775461196899 | total_loss: 2.253678798675537\n",
      "2025-07-23 22:38:14 | INFO | Step 33: local_loss: 1.0788028240203857 | global_loss: 0.29773229360580444 | total_loss: 1.376535177230835\n",
      "2025-07-23 22:39:13 | INFO | Step 34: local_loss: 1.2687007188796997 | global_loss: 0.5676478147506714 | total_loss: 1.836348533630371\n",
      "2025-07-23 22:40:12 | INFO | Step 35: local_loss: 1.3404510021209717 | global_loss: 0.6975631713867188 | total_loss: 2.0380141735076904\n",
      "2025-07-23 22:41:11 | INFO | Step 36: local_loss: 1.083083987236023 | global_loss: 0.7250792980194092 | total_loss: 1.8081632852554321\n",
      "2025-07-23 22:42:08 | INFO | Step 37: local_loss: 1.0616035461425781 | global_loss: 0.3707897961139679 | total_loss: 1.4323933124542236\n",
      "2025-07-23 22:43:08 | INFO | Step 38: local_loss: 1.2887400388717651 | global_loss: 0.49312159419059753 | total_loss: 1.781861662864685\n",
      "2025-07-23 22:44:07 | INFO | Step 39: local_loss: 1.1173738241195679 | global_loss: 0.7778828144073486 | total_loss: 1.8952566385269165\n",
      "2025-07-23 22:45:07 | INFO | Step 40: local_loss: 1.2344372272491455 | global_loss: 0.9752849340438843 | total_loss: 2.2097220420837402\n",
      "2025-07-23 22:46:06 | INFO | Step 41: local_loss: 1.2628837823867798 | global_loss: 0.7041175961494446 | total_loss: 1.9670014381408691\n",
      "2025-07-23 22:47:05 | INFO | Step 42: local_loss: 1.2047512531280518 | global_loss: 0.9261577129364014 | total_loss: 2.130908966064453\n",
      "2025-07-23 22:48:04 | INFO | Step 43: local_loss: 0.9273282885551453 | global_loss: 0.8962022662162781 | total_loss: 1.8235305547714233\n",
      "2025-07-23 22:49:04 | INFO | Step 44: local_loss: 1.3902642726898193 | global_loss: 0.7310912609100342 | total_loss: 2.1213555335998535\n",
      "2025-07-23 22:50:02 | INFO | Step 45: local_loss: 1.1632425785064697 | global_loss: 0.5683404207229614 | total_loss: 1.7315829992294312\n",
      "2025-07-23 22:51:01 | INFO | Step 46: local_loss: 1.289131760597229 | global_loss: 0.48820602893829346 | total_loss: 1.7773377895355225\n",
      "2025-07-23 22:51:59 | INFO | Step 47: local_loss: 1.2319971323013306 | global_loss: 0.40545424818992615 | total_loss: 1.637451410293579\n",
      "2025-07-23 22:52:58 | INFO | Step 48: local_loss: 1.187145471572876 | global_loss: 0.5745433568954468 | total_loss: 1.7616888284683228\n",
      "2025-07-23 22:53:57 | INFO | Step 49: local_loss: 1.4557939767837524 | global_loss: 0.6055827736854553 | total_loss: 2.0613768100738525\n",
      "2025-07-23 22:54:56 | INFO | Step 50: local_loss: 1.1190398931503296 | global_loss: 0.5516869425773621 | total_loss: 1.6707267761230469\n",
      "2025-07-23 22:55:55 | INFO | Step 51: local_loss: 1.3587647676467896 | global_loss: 0.5611981153488159 | total_loss: 1.9199628829956055\n",
      "2025-07-23 22:56:52 | INFO | Step 52: local_loss: 1.2182003259658813 | global_loss: 0.35844388604164124 | total_loss: 1.5766441822052002\n",
      "2025-07-23 22:57:51 | INFO | Step 53: local_loss: 1.216256022453308 | global_loss: 0.4744720160961151 | total_loss: 1.6907280683517456\n",
      "2025-07-23 22:58:51 | INFO | Step 54: local_loss: 1.0221366882324219 | global_loss: 0.677192747592926 | total_loss: 1.6993293762207031\n",
      "2025-07-23 22:59:50 | INFO | Step 55: local_loss: 1.3604928255081177 | global_loss: 0.398164302110672 | total_loss: 1.7586570978164673\n",
      "2025-07-23 23:00:49 | INFO | Step 56: local_loss: 1.2611327171325684 | global_loss: 0.2281886637210846 | total_loss: 1.4893213510513306\n",
      "2025-07-23 23:01:46 | INFO | Step 57: local_loss: 1.0570039749145508 | global_loss: 0.3113033175468445 | total_loss: 1.36830735206604\n",
      "2025-07-23 23:02:46 | INFO | Step 58: local_loss: 1.3302361965179443 | global_loss: 0.5751724243164062 | total_loss: 1.9054086208343506\n",
      "2025-07-23 23:03:31 | INFO | Step 59: local_loss: 1.182037115097046 | global_loss: 0.47778233885765076 | total_loss: 1.659819483757019\n",
      "2025-07-23 23:04:28 | INFO | Step 60: local_loss: 0.9498220682144165 | global_loss: 0.5242966413497925 | total_loss: 1.474118709564209\n",
      "2025-07-23 23:05:29 | INFO | Step 61: local_loss: 1.2132952213287354 | global_loss: 0.17072194814682007 | total_loss: 1.3840172290802002\n",
      "2025-07-23 23:06:26 | INFO | Step 62: local_loss: 0.8517744541168213 | global_loss: 0.6772508025169373 | total_loss: 1.5290253162384033\n",
      "2025-07-23 23:07:24 | INFO | Step 63: local_loss: 1.1471308469772339 | global_loss: 0.2986750602722168 | total_loss: 1.4458059072494507\n",
      "2025-07-23 23:08:22 | INFO | Step 64: local_loss: 0.9870898127555847 | global_loss: 0.8912220001220703 | total_loss: 1.8783118724822998\n",
      "2025-07-23 23:09:21 | INFO | Step 65: local_loss: 1.0609616041183472 | global_loss: 0.7472164630889893 | total_loss: 1.8081780672073364\n",
      "2025-07-23 23:10:20 | INFO | Step 66: local_loss: 0.958301842212677 | global_loss: 1.1386841535568237 | total_loss: 2.0969860553741455\n",
      "2025-07-23 23:11:18 | INFO | Step 67: local_loss: 0.7368001937866211 | global_loss: 0.5328451991081238 | total_loss: 1.2696454524993896\n",
      "2025-07-23 23:12:18 | INFO | Step 68: local_loss: 1.2008953094482422 | global_loss: 0.5382866859436035 | total_loss: 1.7391819953918457\n",
      "2025-07-23 23:13:16 | INFO | Step 69: local_loss: 1.1288056373596191 | global_loss: 0.4703734219074249 | total_loss: 1.5991790294647217\n",
      "2025-07-23 23:14:14 | INFO | Step 70: local_loss: 0.9674888253211975 | global_loss: 0.3639026880264282 | total_loss: 1.3313915729522705\n",
      "2025-07-23 23:15:13 | INFO | Step 71: local_loss: 1.0366556644439697 | global_loss: 0.30322355031967163 | total_loss: 1.3398792743682861\n",
      "2025-07-23 23:16:11 | INFO | Step 72: local_loss: 1.2325732707977295 | global_loss: 0.5524015426635742 | total_loss: 1.7849748134613037\n",
      "2025-07-23 23:17:08 | INFO | Step 73: local_loss: 0.9185957908630371 | global_loss: 0.37740427255630493 | total_loss: 1.2960000038146973\n",
      "2025-07-23 23:18:07 | INFO | Step 74: local_loss: 1.166976809501648 | global_loss: 0.6043137311935425 | total_loss: 1.7712905406951904\n",
      "2025-07-23 23:19:05 | INFO | Step 75: local_loss: 1.0866976976394653 | global_loss: 0.6113318204879761 | total_loss: 1.6980295181274414\n",
      "2025-07-23 23:20:04 | INFO | Step 76: local_loss: 1.4893262386322021 | global_loss: 0.14378000795841217 | total_loss: 1.6331062316894531\n",
      "2025-07-23 23:21:01 | INFO | Step 77: local_loss: 0.9126105904579163 | global_loss: 0.5056761503219604 | total_loss: 1.4182868003845215\n",
      "2025-07-23 23:21:59 | INFO | Step 78: local_loss: 1.0654047727584839 | global_loss: 0.4839094877243042 | total_loss: 1.549314260482788\n",
      "2025-07-23 23:22:59 | INFO | Step 79: local_loss: 1.138554334640503 | global_loss: 0.37019282579421997 | total_loss: 1.5087471008300781\n",
      "2025-07-23 23:23:57 | INFO | Step 80: local_loss: 0.7616288065910339 | global_loss: 0.3842009902000427 | total_loss: 1.1458297967910767\n",
      "2025-07-23 23:26:52 | INFO | Step 83: local_loss: 1.138572096824646 | global_loss: 0.6294403076171875 | total_loss: 1.7680124044418335\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-23 23:27:50 | INFO | Step 84: local_loss: 1.0145306587219238 | global_loss: 0.40527042746543884 | total_loss: 1.419801115989685\n",
      "2025-07-23 23:28:49 | INFO | Step 85: local_loss: 0.891944408416748 | global_loss: 0.5563359260559082 | total_loss: 1.4482803344726562\n",
      "2025-07-23 23:29:46 | INFO | Step 86: local_loss: 1.0306624174118042 | global_loss: 0.19038932025432587 | total_loss: 1.2210516929626465\n",
      "2025-07-23 23:30:45 | INFO | Step 87: local_loss: 1.086035966873169 | global_loss: 0.4945910573005676 | total_loss: 1.5806269645690918\n",
      "2025-07-23 23:31:44 | INFO | Step 88: local_loss: 0.8335626125335693 | global_loss: 0.47565048933029175 | total_loss: 1.3092131614685059\n",
      "2025-07-23 23:32:44 | INFO | Step 89: local_loss: 1.1541309356689453 | global_loss: 0.4353109300136566 | total_loss: 1.5894418954849243\n",
      "2025-07-23 23:33:28 | INFO | Step 90: local_loss: 1.0503054857254028 | global_loss: 0.19850920140743256 | total_loss: 1.2488147020339966\n",
      "2025-07-23 23:34:26 | INFO | Step 91: local_loss: 1.1454839706420898 | global_loss: 0.16100680828094482 | total_loss: 1.3064907789230347\n",
      "2025-07-23 23:35:25 | INFO | Step 92: local_loss: 0.7487927079200745 | global_loss: 0.4504099488258362 | total_loss: 1.1992026567459106\n",
      "2025-07-23 23:36:24 | INFO | Step 93: local_loss: 0.9687655568122864 | global_loss: 0.49770107865333557 | total_loss: 1.4664666652679443\n",
      "2025-07-23 23:37:23 | INFO | Step 94: local_loss: 1.0145840644836426 | global_loss: 0.34584417939186096 | total_loss: 1.3604282140731812\n",
      "2025-07-23 23:38:21 | INFO | Step 95: local_loss: 0.9793177843093872 | global_loss: 0.1643587052822113 | total_loss: 1.143676519393921\n",
      "2025-07-23 23:39:20 | INFO | Step 96: local_loss: 0.8621503114700317 | global_loss: 0.6976683139801025 | total_loss: 1.5598186254501343\n",
      "2025-07-23 23:40:20 | INFO | Step 97: local_loss: 1.0536972284317017 | global_loss: 0.4660639464855194 | total_loss: 1.5197612047195435\n",
      "2025-07-23 23:41:04 | INFO | Step 98: local_loss: 0.9355655312538147 | global_loss: 0.35913074016571045 | total_loss: 1.29469633102417\n",
      "2025-07-23 23:42:02 | INFO | Step 99: local_loss: 1.4507687091827393 | global_loss: 0.28245219588279724 | total_loss: 1.7332209348678589\n",
      "2025-07-23 23:42:02 | INFO | Center node 6714 finished at step 100 with loss 1.7332\n",
      "2025-07-23 23:42:02 | INFO | \n",
      "=== Pretraining Epoch 5 (Center Node: 7617) ===\n",
      "2025-07-23 23:43:02 | INFO | Step 0: local_loss: 3.438575267791748 | global_loss: 2.662449598312378 | total_loss: 6.101024627685547\n",
      "2025-07-23 23:43:48 | INFO | Step 1: local_loss: 3.410220146179199 | global_loss: 2.200101613998413 | total_loss: 5.610321998596191\n",
      "2025-07-23 23:44:47 | INFO | Step 2: local_loss: 3.479987382888794 | global_loss: 2.4194819927215576 | total_loss: 5.899469375610352\n",
      "2025-07-23 23:45:48 | INFO | Step 3: local_loss: 3.3846964836120605 | global_loss: 2.1636083126068115 | total_loss: 5.548304557800293\n",
      "2025-07-23 23:46:46 | INFO | Step 4: local_loss: 3.2753381729125977 | global_loss: 2.744976043701172 | total_loss: 6.0203142166137695\n",
      "2025-07-23 23:47:44 | INFO | Step 5: local_loss: 3.0389678478240967 | global_loss: 2.9855268001556396 | total_loss: 6.024494647979736\n",
      "2025-07-23 23:48:29 | INFO | Step 6: local_loss: 3.700366973876953 | global_loss: 2.8241517543792725 | total_loss: 6.524518966674805\n",
      "2025-07-23 23:49:25 | INFO | Step 7: local_loss: 3.1218371391296387 | global_loss: 2.1157033443450928 | total_loss: 5.237540245056152\n",
      "2025-07-23 23:50:26 | INFO | Step 8: local_loss: 3.13651967048645 | global_loss: 2.2493627071380615 | total_loss: 5.385882377624512\n",
      "2025-07-23 23:51:24 | INFO | Step 9: local_loss: 2.9414937496185303 | global_loss: 2.1647517681121826 | total_loss: 5.106245517730713\n",
      "2025-07-23 23:52:23 | INFO | Step 10: local_loss: 3.653933525085449 | global_loss: 2.744706630706787 | total_loss: 6.398640155792236\n",
      "2025-07-23 23:53:23 | INFO | Step 11: local_loss: 3.222510814666748 | global_loss: 2.801706075668335 | total_loss: 6.024216651916504\n",
      "2025-07-23 23:54:22 | INFO | Step 12: local_loss: 3.4784135818481445 | global_loss: 2.6727068424224854 | total_loss: 6.151120185852051\n",
      "2025-07-23 23:55:20 | INFO | Step 13: local_loss: 3.4677441120147705 | global_loss: 2.5764424800872803 | total_loss: 6.044186592102051\n",
      "2025-07-23 23:56:20 | INFO | Step 14: local_loss: 3.6180524826049805 | global_loss: 2.808363437652588 | total_loss: 6.426415920257568\n",
      "2025-07-23 23:57:20 | INFO | Step 15: local_loss: 3.725917339324951 | global_loss: 1.3644040822982788 | total_loss: 5.0903215408325195\n",
      "2025-07-23 23:58:20 | INFO | Step 16: local_loss: 3.035665273666382 | global_loss: 2.4472782611846924 | total_loss: 5.482943534851074\n",
      "2025-07-23 23:59:21 | INFO | Step 17: local_loss: 2.895408868789673 | global_loss: 2.579268455505371 | total_loss: 5.474677085876465\n",
      "2025-07-24 00:00:20 | INFO | Step 18: local_loss: 3.4277966022491455 | global_loss: 2.730607748031616 | total_loss: 6.158404350280762\n",
      "2025-07-24 00:01:19 | INFO | Step 19: local_loss: 3.559904098510742 | global_loss: 2.5941720008850098 | total_loss: 6.154076099395752\n",
      "2025-07-24 00:02:19 | INFO | Step 20: local_loss: 3.170818328857422 | global_loss: 2.6366095542907715 | total_loss: 5.807427883148193\n",
      "2025-07-24 00:03:18 | INFO | Step 21: local_loss: 3.0463173389434814 | global_loss: 2.177342653274536 | total_loss: 5.223659992218018\n",
      "2025-07-24 00:04:17 | INFO | Step 22: local_loss: 3.4589014053344727 | global_loss: 2.6682205200195312 | total_loss: 6.127121925354004\n",
      "2025-07-24 00:05:15 | INFO | Step 23: local_loss: 3.276594877243042 | global_loss: 2.9227712154388428 | total_loss: 6.199366092681885\n",
      "2025-07-24 00:06:13 | INFO | Step 24: local_loss: 3.810964822769165 | global_loss: 3.0249509811401367 | total_loss: 6.835915565490723\n",
      "2025-07-24 00:07:12 | INFO | Step 25: local_loss: 3.3073315620422363 | global_loss: 2.251347064971924 | total_loss: 5.55867862701416\n",
      "2025-07-24 00:08:13 | INFO | Step 26: local_loss: 3.1477959156036377 | global_loss: 2.532191276550293 | total_loss: 5.679986953735352\n",
      "2025-07-24 00:09:11 | INFO | Step 27: local_loss: 3.2371745109558105 | global_loss: 1.548845648765564 | total_loss: 4.786020278930664\n",
      "2025-07-24 00:10:11 | INFO | Step 28: local_loss: 3.0758626461029053 | global_loss: 2.0039258003234863 | total_loss: 5.0797882080078125\n",
      "2025-07-24 00:11:10 | INFO | Step 29: local_loss: 2.8841021060943604 | global_loss: 2.0883214473724365 | total_loss: 4.972423553466797\n",
      "2025-07-24 00:12:08 | INFO | Step 30: local_loss: 3.3648481369018555 | global_loss: 1.4390275478363037 | total_loss: 4.803875923156738\n",
      "2025-07-24 00:13:07 | INFO | Step 31: local_loss: 2.5971312522888184 | global_loss: 1.5097386837005615 | total_loss: 4.106869697570801\n",
      "2025-07-24 00:14:04 | INFO | Step 32: local_loss: 3.125645160675049 | global_loss: 2.4491426944732666 | total_loss: 5.5747880935668945\n",
      "2025-07-24 00:15:02 | INFO | Step 33: local_loss: 2.6994547843933105 | global_loss: 2.1799516677856445 | total_loss: 4.879406452178955\n",
      "2025-07-24 00:16:02 | INFO | Step 34: local_loss: 3.567267656326294 | global_loss: 2.0596976280212402 | total_loss: 5.626965522766113\n",
      "2025-07-24 00:17:01 | INFO | Step 35: local_loss: 2.740387439727783 | global_loss: 2.7667438983917236 | total_loss: 5.507131576538086\n",
      "2025-07-24 00:18:01 | INFO | Step 36: local_loss: 3.2596428394317627 | global_loss: 1.6432732343673706 | total_loss: 4.902915954589844\n",
      "2025-07-24 00:19:00 | INFO | Step 37: local_loss: 3.2241039276123047 | global_loss: 1.9451955556869507 | total_loss: 5.169299602508545\n",
      "2025-07-24 00:19:59 | INFO | Step 38: local_loss: 3.1087982654571533 | global_loss: 2.444612979888916 | total_loss: 5.553411483764648\n",
      "2025-07-24 00:20:58 | INFO | Step 39: local_loss: 3.4439501762390137 | global_loss: 1.2417782545089722 | total_loss: 4.685728549957275\n",
      "2025-07-24 00:21:58 | INFO | Step 40: local_loss: 3.52142071723938 | global_loss: 3.2339653968811035 | total_loss: 6.7553863525390625\n",
      "2025-07-24 00:22:42 | INFO | Step 41: local_loss: 2.9932408332824707 | global_loss: 2.8591084480285645 | total_loss: 5.852349281311035\n",
      "2025-07-24 00:23:39 | INFO | Step 42: local_loss: 3.121276378631592 | global_loss: 2.0526187419891357 | total_loss: 5.173894882202148\n",
      "2025-07-24 00:24:38 | INFO | Step 43: local_loss: 3.567223072052002 | global_loss: 2.0371005535125732 | total_loss: 5.604323387145996\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 00:25:36 | INFO | Step 44: local_loss: 3.5734703540802 | global_loss: 2.183786392211914 | total_loss: 5.757256507873535\n",
      "2025-07-24 00:26:34 | INFO | Step 45: local_loss: 3.0353500843048096 | global_loss: 2.2621829509735107 | total_loss: 5.29753303527832\n",
      "2025-07-24 00:27:33 | INFO | Step 46: local_loss: 3.1679189205169678 | global_loss: 2.4446871280670166 | total_loss: 5.612606048583984\n",
      "2025-07-24 00:28:32 | INFO | Step 47: local_loss: 3.1425621509552 | global_loss: 2.0160512924194336 | total_loss: 5.158613204956055\n",
      "2025-07-24 00:29:31 | INFO | Step 48: local_loss: 3.1824593544006348 | global_loss: 2.2192108631134033 | total_loss: 5.401670455932617\n",
      "2025-07-24 00:30:31 | INFO | Step 49: local_loss: 3.089972972869873 | global_loss: 2.438037633895874 | total_loss: 5.528010368347168\n",
      "2025-07-24 00:31:30 | INFO | Step 50: local_loss: 3.244584083557129 | global_loss: 1.8821830749511719 | total_loss: 5.126767158508301\n",
      "2025-07-24 00:32:15 | INFO | Step 51: local_loss: 3.0681917667388916 | global_loss: 1.660923719406128 | total_loss: 4.7291154861450195\n",
      "2025-07-24 00:33:13 | INFO | Step 52: local_loss: 3.4469218254089355 | global_loss: 1.1562318801879883 | total_loss: 4.603153705596924\n",
      "2025-07-24 00:34:11 | INFO | Step 53: local_loss: 2.974309206008911 | global_loss: 1.307447910308838 | total_loss: 4.281757354736328\n",
      "2025-07-24 00:35:10 | INFO | Step 54: local_loss: 3.0349230766296387 | global_loss: 1.8051745891571045 | total_loss: 4.840097427368164\n",
      "2025-07-24 00:36:09 | INFO | Step 55: local_loss: 3.1388299465179443 | global_loss: 1.492031216621399 | total_loss: 4.630861282348633\n",
      "2025-07-24 00:37:06 | INFO | Step 56: local_loss: 2.833360195159912 | global_loss: 1.9798675775527954 | total_loss: 4.813227653503418\n",
      "2025-07-24 00:38:03 | INFO | Step 57: local_loss: 3.022731304168701 | global_loss: 2.387181520462036 | total_loss: 5.409913063049316\n",
      "2025-07-24 00:39:02 | INFO | Step 58: local_loss: 2.9925146102905273 | global_loss: 1.756654143333435 | total_loss: 4.749168872833252\n",
      "2025-07-24 00:40:01 | INFO | Step 59: local_loss: 3.1636650562286377 | global_loss: 1.6717185974121094 | total_loss: 4.835383415222168\n",
      "2025-07-24 00:41:00 | INFO | Step 60: local_loss: 3.0749449729919434 | global_loss: 2.4143548011779785 | total_loss: 5.489299774169922\n",
      "2025-07-24 00:41:59 | INFO | Step 61: local_loss: 2.9303925037384033 | global_loss: 1.2657346725463867 | total_loss: 4.196126937866211\n",
      "2025-07-24 00:42:58 | INFO | Step 62: local_loss: 3.274242401123047 | global_loss: 2.406402111053467 | total_loss: 5.680644512176514\n",
      "2025-07-24 00:43:58 | INFO | Step 63: local_loss: 3.404815673828125 | global_loss: 1.869380235671997 | total_loss: 5.274195671081543\n",
      "2025-07-24 00:44:57 | INFO | Step 64: local_loss: 3.2912814617156982 | global_loss: 1.3138967752456665 | total_loss: 4.605178356170654\n",
      "2025-07-24 00:45:57 | INFO | Step 65: local_loss: 2.88712215423584 | global_loss: 1.8947726488113403 | total_loss: 4.781894683837891\n",
      "2025-07-24 00:46:55 | INFO | Step 66: local_loss: 3.0220963954925537 | global_loss: 1.9603620767593384 | total_loss: 4.982458591461182\n",
      "2025-07-24 00:47:54 | INFO | Step 67: local_loss: 3.0258312225341797 | global_loss: 2.21992564201355 | total_loss: 5.245757102966309\n",
      "2025-07-24 00:48:53 | INFO | Step 68: local_loss: 2.7350709438323975 | global_loss: 1.0113636255264282 | total_loss: 3.7464346885681152\n",
      "2025-07-24 00:49:52 | INFO | Step 69: local_loss: 2.578056812286377 | global_loss: 1.2396435737609863 | total_loss: 3.8177003860473633\n",
      "2025-07-24 00:50:50 | INFO | Step 70: local_loss: 3.208028554916382 | global_loss: 1.5750818252563477 | total_loss: 4.783110618591309\n",
      "2025-07-24 00:51:48 | INFO | Step 71: local_loss: 3.346332550048828 | global_loss: 0.7170350551605225 | total_loss: 4.06336784362793\n",
      "2025-07-24 00:52:47 | INFO | Step 72: local_loss: 3.106881618499756 | global_loss: 1.6063497066497803 | total_loss: 4.713231086730957\n",
      "2025-07-24 00:53:47 | INFO | Step 73: local_loss: 2.6511569023132324 | global_loss: 2.534116268157959 | total_loss: 5.185273170471191\n",
      "2025-07-24 00:54:47 | INFO | Step 74: local_loss: 2.4887373447418213 | global_loss: 1.3479193449020386 | total_loss: 3.8366565704345703\n",
      "2025-07-24 00:55:46 | INFO | Step 75: local_loss: 2.5013163089752197 | global_loss: 1.3457597494125366 | total_loss: 3.847075939178467\n",
      "2025-07-24 00:56:45 | INFO | Step 76: local_loss: 3.8417320251464844 | global_loss: 0.9857415556907654 | total_loss: 4.8274736404418945\n",
      "2025-07-24 00:57:45 | INFO | Step 77: local_loss: 3.22580623626709 | global_loss: 1.1927425861358643 | total_loss: 4.418548583984375\n",
      "2025-07-24 00:58:44 | INFO | Step 78: local_loss: 2.6563737392425537 | global_loss: 1.0706787109375 | total_loss: 3.7270524501800537\n",
      "2025-07-24 00:59:41 | INFO | Step 79: local_loss: 2.4318864345550537 | global_loss: 1.0353827476501465 | total_loss: 3.4672691822052\n",
      "2025-07-24 01:00:40 | INFO | Step 80: local_loss: 2.670322895050049 | global_loss: 2.42287540435791 | total_loss: 5.093198299407959\n",
      "2025-07-24 01:01:39 | INFO | Step 81: local_loss: 2.618321418762207 | global_loss: 1.2849764823913574 | total_loss: 3.9032979011535645\n",
      "2025-07-24 01:02:38 | INFO | Step 82: local_loss: 2.6943774223327637 | global_loss: 2.5510215759277344 | total_loss: 5.245398998260498\n",
      "2025-07-24 01:03:37 | INFO | Step 83: local_loss: 2.4772181510925293 | global_loss: 1.329917073249817 | total_loss: 3.8071351051330566\n",
      "2025-07-24 01:04:36 | INFO | Step 84: local_loss: 2.6068942546844482 | global_loss: 1.8011256456375122 | total_loss: 4.40802001953125\n",
      "2025-07-24 01:05:36 | INFO | Step 85: local_loss: 2.9106762409210205 | global_loss: 1.0727986097335815 | total_loss: 3.9834747314453125\n",
      "2025-07-24 01:06:34 | INFO | Step 86: local_loss: 2.243724822998047 | global_loss: 1.3469269275665283 | total_loss: 3.590651750564575\n",
      "2025-07-24 01:07:19 | INFO | Step 87: local_loss: 3.351574659347534 | global_loss: 1.7308090925216675 | total_loss: 5.082383632659912\n",
      "2025-07-24 01:08:17 | INFO | Step 88: local_loss: 2.6470324993133545 | global_loss: 0.8134663105010986 | total_loss: 3.460498809814453\n",
      "2025-07-24 01:09:16 | INFO | Step 89: local_loss: 3.4188249111175537 | global_loss: 0.9204953908920288 | total_loss: 4.339320182800293\n",
      "2025-07-24 01:10:14 | INFO | Step 90: local_loss: 2.6679036617279053 | global_loss: 1.2326021194458008 | total_loss: 3.900505781173706\n",
      "2025-07-24 01:11:13 | INFO | Step 91: local_loss: 3.23708176612854 | global_loss: 1.0568699836730957 | total_loss: 4.293951988220215\n",
      "2025-07-24 01:11:57 | INFO | Step 92: local_loss: 3.484818458557129 | global_loss: 3.439072847366333 | total_loss: 6.923891067504883\n",
      "2025-07-24 01:12:54 | INFO | Step 93: local_loss: 2.8632826805114746 | global_loss: 1.8545663356781006 | total_loss: 4.717848777770996\n",
      "2025-07-24 01:13:54 | INFO | Step 94: local_loss: 3.417980909347534 | global_loss: 1.2612388134002686 | total_loss: 4.679219722747803\n",
      "2025-07-24 01:14:53 | INFO | Step 95: local_loss: 2.582491159439087 | global_loss: 1.386504054069519 | total_loss: 3.9689950942993164\n",
      "2025-07-24 01:15:52 | INFO | Step 96: local_loss: 2.860264778137207 | global_loss: 1.240660548210144 | total_loss: 4.100925445556641\n",
      "2025-07-24 01:16:50 | INFO | Step 97: local_loss: 2.5859997272491455 | global_loss: 1.1207246780395508 | total_loss: 3.7067244052886963\n",
      "2025-07-24 01:17:49 | INFO | Step 98: local_loss: 2.756295919418335 | global_loss: 1.692949652671814 | total_loss: 4.449245452880859\n",
      "2025-07-24 01:18:47 | INFO | Step 99: local_loss: 2.8675999641418457 | global_loss: 1.5614848136901855 | total_loss: 4.429084777832031\n",
      "2025-07-24 01:18:47 | INFO | Center node 7617 finished at step 100 with loss 4.4291\n",
      "2025-07-24 01:18:47 | INFO | \n",
      "=== Pretraining Epoch 6 (Center Node: 297) ===\n",
      "2025-07-24 01:19:46 | INFO | Step 0: local_loss: 4.1658935546875 | global_loss: 2.5119545459747314 | total_loss: 6.677847862243652\n",
      "2025-07-24 01:20:45 | INFO | Step 1: local_loss: 3.1461968421936035 | global_loss: 2.9347665309906006 | total_loss: 6.080963134765625\n",
      "2025-07-24 01:21:43 | INFO | Step 2: local_loss: 3.392906427383423 | global_loss: 2.340747356414795 | total_loss: 5.733654022216797\n",
      "2025-07-24 01:22:27 | INFO | Step 3: local_loss: 2.99794602394104 | global_loss: 2.6266534328460693 | total_loss: 5.624599456787109\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 01:23:23 | INFO | Step 4: local_loss: 3.3408219814300537 | global_loss: 3.6265299320220947 | total_loss: 6.967351913452148\n",
      "2025-07-24 01:24:22 | INFO | Step 5: local_loss: 2.649113655090332 | global_loss: 2.4317007064819336 | total_loss: 5.080814361572266\n",
      "2025-07-24 01:25:21 | INFO | Step 6: local_loss: 2.8567473888397217 | global_loss: 2.600533962249756 | total_loss: 5.457281112670898\n",
      "2025-07-24 01:26:20 | INFO | Step 7: local_loss: 3.0792765617370605 | global_loss: 2.7439653873443604 | total_loss: 5.8232421875\n",
      "2025-07-24 01:27:18 | INFO | Step 8: local_loss: 3.047973155975342 | global_loss: 2.1004085540771484 | total_loss: 5.14838171005249\n",
      "2025-07-24 01:28:16 | INFO | Step 9: local_loss: 3.1987664699554443 | global_loss: 2.2457497119903564 | total_loss: 5.444516181945801\n",
      "2025-07-24 01:29:15 | INFO | Step 10: local_loss: 3.164299249649048 | global_loss: 3.9438230991363525 | total_loss: 7.1081223487854\n",
      "2025-07-24 01:30:11 | INFO | Step 11: local_loss: 3.117698907852173 | global_loss: 3.19342041015625 | total_loss: 6.311119079589844\n",
      "2025-07-24 01:31:09 | INFO | Step 12: local_loss: 2.8574283123016357 | global_loss: 2.8607711791992188 | total_loss: 5.718199729919434\n",
      "2025-07-24 01:32:07 | INFO | Step 13: local_loss: 2.7369914054870605 | global_loss: 2.606529951095581 | total_loss: 5.3435211181640625\n",
      "2025-07-24 01:33:05 | INFO | Step 14: local_loss: 3.150960683822632 | global_loss: 2.6287829875946045 | total_loss: 5.779743671417236\n",
      "2025-07-24 01:34:04 | INFO | Step 15: local_loss: 3.267908811569214 | global_loss: 2.3575305938720703 | total_loss: 5.625439643859863\n",
      "2025-07-24 01:35:03 | INFO | Step 16: local_loss: 2.719431161880493 | global_loss: 3.067439556121826 | total_loss: 5.786870956420898\n",
      "2025-07-24 01:36:01 | INFO | Step 17: local_loss: 3.0786752700805664 | global_loss: 1.60463285446167 | total_loss: 4.683308124542236\n",
      "2025-07-24 01:37:00 | INFO | Step 18: local_loss: 2.9701969623565674 | global_loss: 2.2572836875915527 | total_loss: 5.227480888366699\n",
      "2025-07-24 01:37:59 | INFO | Step 19: local_loss: 2.8518481254577637 | global_loss: 2.9497897624969482 | total_loss: 5.801637649536133\n",
      "2025-07-24 01:38:57 | INFO | Step 20: local_loss: 3.0007379055023193 | global_loss: 2.8623239994049072 | total_loss: 5.863061904907227\n",
      "2025-07-24 01:39:56 | INFO | Step 21: local_loss: 2.6777873039245605 | global_loss: 1.9969391822814941 | total_loss: 4.674726486206055\n",
      "2025-07-24 01:40:54 | INFO | Step 22: local_loss: 2.89635968208313 | global_loss: 3.596099615097046 | total_loss: 6.492459297180176\n",
      "2025-07-24 01:41:52 | INFO | Step 23: local_loss: 2.9647815227508545 | global_loss: 2.753999710083008 | total_loss: 5.718781471252441\n",
      "2025-07-24 01:42:50 | INFO | Step 24: local_loss: 2.939432382583618 | global_loss: 2.252969264984131 | total_loss: 5.192401885986328\n",
      "2025-07-24 01:43:34 | INFO | Step 25: local_loss: 3.1655654907226562 | global_loss: 1.6914398670196533 | total_loss: 4.8570051193237305\n",
      "2025-07-24 01:44:32 | INFO | Step 26: local_loss: 3.1399636268615723 | global_loss: 2.594330310821533 | total_loss: 5.7342939376831055\n",
      "2025-07-24 01:45:30 | INFO | Step 27: local_loss: 2.6470744609832764 | global_loss: 1.9706470966339111 | total_loss: 4.6177215576171875\n",
      "2025-07-24 01:46:29 | INFO | Step 28: local_loss: 3.0407958030700684 | global_loss: 2.4885215759277344 | total_loss: 5.529317378997803\n",
      "2025-07-24 01:47:28 | INFO | Step 29: local_loss: 2.966193437576294 | global_loss: 2.300961494445801 | total_loss: 5.267154693603516\n",
      "2025-07-24 01:48:25 | INFO | Step 30: local_loss: 2.7312066555023193 | global_loss: 2.3706109523773193 | total_loss: 5.101817607879639\n",
      "2025-07-24 01:49:24 | INFO | Step 31: local_loss: 3.539614677429199 | global_loss: 2.4010088443756104 | total_loss: 5.9406232833862305\n",
      "2025-07-24 01:50:22 | INFO | Step 32: local_loss: 3.1997084617614746 | global_loss: 2.6466991901397705 | total_loss: 5.846407890319824\n",
      "2025-07-24 01:51:20 | INFO | Step 33: local_loss: 2.8366668224334717 | global_loss: 1.9830279350280762 | total_loss: 4.819694519042969\n",
      "2025-07-24 01:52:18 | INFO | Step 34: local_loss: 2.8642725944519043 | global_loss: 2.7395198345184326 | total_loss: 5.603792190551758\n",
      "2025-07-24 01:53:18 | INFO | Step 35: local_loss: 2.9774601459503174 | global_loss: 2.645310401916504 | total_loss: 5.622770309448242\n",
      "2025-07-24 01:54:16 | INFO | Step 36: local_loss: 3.3041038513183594 | global_loss: 2.2138078212738037 | total_loss: 5.517911911010742\n",
      "2025-07-24 01:55:14 | INFO | Step 37: local_loss: 3.6514830589294434 | global_loss: 1.7359042167663574 | total_loss: 5.387387275695801\n",
      "2025-07-24 01:56:13 | INFO | Step 38: local_loss: 2.840296506881714 | global_loss: 2.7999861240386963 | total_loss: 5.64028263092041\n",
      "2025-07-24 01:56:57 | INFO | Step 39: local_loss: 2.7925548553466797 | global_loss: 2.9279394149780273 | total_loss: 5.720494270324707\n",
      "2025-07-24 01:57:55 | INFO | Step 40: local_loss: 3.025742769241333 | global_loss: 1.9042341709136963 | total_loss: 4.929976940155029\n",
      "2025-07-24 01:58:53 | INFO | Step 41: local_loss: 2.9666945934295654 | global_loss: 2.663926601409912 | total_loss: 5.630620956420898\n",
      "2025-07-24 01:59:51 | INFO | Step 42: local_loss: 2.924760103225708 | global_loss: 2.218510627746582 | total_loss: 5.143270492553711\n",
      "2025-07-24 02:00:49 | INFO | Step 43: local_loss: 2.844268798828125 | global_loss: 2.4107093811035156 | total_loss: 5.254978179931641\n",
      "2025-07-24 02:01:46 | INFO | Step 44: local_loss: 2.9185240268707275 | global_loss: 1.3405600786209106 | total_loss: 4.259084224700928\n",
      "2025-07-24 02:02:44 | INFO | Step 45: local_loss: 2.7444543838500977 | global_loss: 1.6940749883651733 | total_loss: 4.4385294914245605\n",
      "2025-07-24 02:03:40 | INFO | Step 46: local_loss: 2.6453182697296143 | global_loss: 2.014045000076294 | total_loss: 4.659363269805908\n",
      "2025-07-24 02:04:37 | INFO | Step 47: local_loss: 2.883948802947998 | global_loss: 1.728309154510498 | total_loss: 4.612257957458496\n",
      "2025-07-24 02:05:21 | INFO | Step 48: local_loss: 2.6080009937286377 | global_loss: 1.7932679653167725 | total_loss: 4.40126895904541\n",
      "2025-07-24 02:06:18 | INFO | Step 49: local_loss: 3.2557919025421143 | global_loss: 2.2709877490997314 | total_loss: 5.526779651641846\n",
      "2025-07-24 02:07:16 | INFO | Step 50: local_loss: 2.8516335487365723 | global_loss: 2.0878212451934814 | total_loss: 4.939455032348633\n",
      "2025-07-24 02:08:13 | INFO | Step 51: local_loss: 2.9955532550811768 | global_loss: 2.3284006118774414 | total_loss: 5.323953628540039\n",
      "2025-07-24 02:09:10 | INFO | Step 52: local_loss: 3.027674913406372 | global_loss: 2.0483455657958984 | total_loss: 5.076020240783691\n",
      "2025-07-24 02:10:27 | INFO | Step 53: local_loss: 2.7101268768310547 | global_loss: 2.3525619506835938 | total_loss: 5.062688827514648\n",
      "2025-07-24 02:11:28 | INFO | Step 54: local_loss: 2.8062634468078613 | global_loss: 1.991680383682251 | total_loss: 4.797944068908691\n",
      "2025-07-24 02:12:27 | INFO | Step 55: local_loss: 2.620012044906616 | global_loss: 1.7670766115188599 | total_loss: 4.387088775634766\n",
      "2025-07-24 02:13:25 | INFO | Step 56: local_loss: 2.6362674236297607 | global_loss: 2.0577621459960938 | total_loss: 4.694029808044434\n",
      "2025-07-24 02:14:22 | INFO | Step 57: local_loss: 2.4669296741485596 | global_loss: 1.7525883913040161 | total_loss: 4.219518184661865\n",
      "2025-07-24 02:15:19 | INFO | Step 58: local_loss: 2.75356125831604 | global_loss: 2.7726686000823975 | total_loss: 5.5262298583984375\n",
      "2025-07-24 02:16:17 | INFO | Step 59: local_loss: 3.098301887512207 | global_loss: 1.6199536323547363 | total_loss: 4.718255519866943\n",
      "2025-07-24 02:17:14 | INFO | Step 60: local_loss: 2.3912205696105957 | global_loss: 2.547649621963501 | total_loss: 4.938870429992676\n",
      "2025-07-24 02:18:12 | INFO | Step 61: local_loss: 2.998297929763794 | global_loss: 1.4060276746749878 | total_loss: 4.404325485229492\n",
      "2025-07-24 02:19:09 | INFO | Step 62: local_loss: 2.6712255477905273 | global_loss: 2.3147213459014893 | total_loss: 4.9859466552734375\n",
      "2025-07-24 02:20:05 | INFO | Step 63: local_loss: 2.750579833984375 | global_loss: 2.1838150024414062 | total_loss: 4.934394836425781\n",
      "2025-07-24 02:21:03 | INFO | Step 64: local_loss: 3.1071014404296875 | global_loss: 1.7889355421066284 | total_loss: 4.8960371017456055\n",
      "2025-07-24 02:22:01 | INFO | Step 65: local_loss: 3.2590510845184326 | global_loss: 1.4387918710708618 | total_loss: 4.697843074798584\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 02:22:59 | INFO | Step 66: local_loss: 2.9430313110351562 | global_loss: 1.5497233867645264 | total_loss: 4.492754936218262\n",
      "2025-07-24 02:23:56 | INFO | Step 67: local_loss: 3.0901384353637695 | global_loss: 1.2172231674194336 | total_loss: 4.307361602783203\n",
      "2025-07-24 02:24:54 | INFO | Step 68: local_loss: 3.0884928703308105 | global_loss: 1.9540539979934692 | total_loss: 5.04254674911499\n",
      "2025-07-24 02:25:52 | INFO | Step 69: local_loss: 2.7059683799743652 | global_loss: 2.3031327724456787 | total_loss: 5.009100914001465\n",
      "2025-07-24 02:26:51 | INFO | Step 70: local_loss: 2.5403664112091064 | global_loss: 3.3981645107269287 | total_loss: 5.938530921936035\n",
      "2025-07-24 02:27:51 | INFO | Step 71: local_loss: 3.14987850189209 | global_loss: 2.9979190826416016 | total_loss: 6.147797584533691\n",
      "2025-07-24 02:28:49 | INFO | Step 72: local_loss: 2.7752296924591064 | global_loss: 2.1239030361175537 | total_loss: 4.89913272857666\n",
      "2025-07-24 02:29:48 | INFO | Step 73: local_loss: 2.6191582679748535 | global_loss: 1.7025905847549438 | total_loss: 4.321748733520508\n",
      "2025-07-24 02:30:46 | INFO | Step 74: local_loss: 2.883784055709839 | global_loss: 1.4546279907226562 | total_loss: 4.338412284851074\n",
      "2025-07-24 02:31:46 | INFO | Step 75: local_loss: 2.3848118782043457 | global_loss: 1.441909909248352 | total_loss: 3.826721668243408\n",
      "2025-07-24 02:32:45 | INFO | Step 76: local_loss: 2.758826494216919 | global_loss: 2.2824766635894775 | total_loss: 5.0413031578063965\n",
      "2025-07-24 02:33:44 | INFO | Step 77: local_loss: 2.6150872707366943 | global_loss: 1.1956398487091064 | total_loss: 3.810727119445801\n",
      "2025-07-24 02:34:43 | INFO | Step 78: local_loss: 3.1317474842071533 | global_loss: 2.489407777786255 | total_loss: 5.621155261993408\n",
      "2025-07-24 02:35:40 | INFO | Step 79: local_loss: 2.7163732051849365 | global_loss: 1.6450889110565186 | total_loss: 4.361462116241455\n",
      "2025-07-24 02:36:38 | INFO | Step 80: local_loss: 2.388622283935547 | global_loss: 1.4053871631622314 | total_loss: 3.7940094470977783\n",
      "2025-07-24 02:37:37 | INFO | Step 81: local_loss: 2.514263153076172 | global_loss: 1.806453824043274 | total_loss: 4.320716857910156\n",
      "2025-07-24 02:38:35 | INFO | Step 82: local_loss: 2.65248703956604 | global_loss: 2.2557711601257324 | total_loss: 4.908258438110352\n",
      "2025-07-24 02:39:33 | INFO | Step 83: local_loss: 2.7158498764038086 | global_loss: 2.7372701168060303 | total_loss: 5.453120231628418\n",
      "2025-07-24 02:40:30 | INFO | Step 84: local_loss: 2.714355707168579 | global_loss: 2.2563018798828125 | total_loss: 4.9706573486328125\n",
      "2025-07-24 02:41:28 | INFO | Step 85: local_loss: 2.5051522254943848 | global_loss: 1.3227436542510986 | total_loss: 3.8278958797454834\n",
      "2025-07-24 02:42:27 | INFO | Step 86: local_loss: 2.293886423110962 | global_loss: 2.0180790424346924 | total_loss: 4.311965465545654\n",
      "2025-07-24 02:43:23 | INFO | Step 87: local_loss: 2.4339425563812256 | global_loss: 1.740195631980896 | total_loss: 4.174138069152832\n",
      "2025-07-24 02:44:21 | INFO | Step 88: local_loss: 2.609598398208618 | global_loss: 2.1311588287353516 | total_loss: 4.740756988525391\n",
      "2025-07-24 02:45:19 | INFO | Step 89: local_loss: 2.8549695014953613 | global_loss: 1.4624550342559814 | total_loss: 4.317424774169922\n",
      "2025-07-24 02:46:16 | INFO | Step 90: local_loss: 2.185220241546631 | global_loss: 1.920029640197754 | total_loss: 4.105249881744385\n",
      "2025-07-24 02:47:13 | INFO | Step 91: local_loss: 2.4732003211975098 | global_loss: 1.7730584144592285 | total_loss: 4.246258735656738\n",
      "2025-07-24 02:48:10 | INFO | Step 92: local_loss: 2.9076836109161377 | global_loss: 1.0299268960952759 | total_loss: 3.937610626220703\n",
      "2025-07-24 02:49:07 | INFO | Step 93: local_loss: 2.286026954650879 | global_loss: 1.3787219524383545 | total_loss: 3.6647489070892334\n",
      "2025-07-24 02:50:05 | INFO | Step 94: local_loss: 2.430739164352417 | global_loss: 1.6878786087036133 | total_loss: 4.118618011474609\n",
      "2025-07-24 02:51:03 | INFO | Step 95: local_loss: 2.938004732131958 | global_loss: 1.7024496793746948 | total_loss: 4.640454292297363\n",
      "2025-07-24 02:52:01 | INFO | Step 96: local_loss: 2.178359270095825 | global_loss: 1.073145866394043 | total_loss: 3.251505136489868\n",
      "2025-07-24 02:52:59 | INFO | Step 97: local_loss: 2.9247500896453857 | global_loss: 2.256174087524414 | total_loss: 5.180924415588379\n",
      "2025-07-24 02:53:57 | INFO | Step 98: local_loss: 2.8570210933685303 | global_loss: 1.267698884010315 | total_loss: 4.124720096588135\n",
      "2025-07-24 02:54:55 | INFO | Step 99: local_loss: 2.427511692047119 | global_loss: 1.6078935861587524 | total_loss: 4.035405158996582\n",
      "2025-07-24 02:54:55 | INFO | Center node 297 finished at step 100 with loss 4.0354\n",
      "2025-07-24 02:54:55 | INFO | \n",
      "=== Pretraining Epoch 7 (Center Node: 10280) ===\n",
      "2025-07-24 02:55:54 | INFO | Step 0: local_loss: 4.032666206359863 | global_loss: 3.273151397705078 | total_loss: 7.305817604064941\n",
      "2025-07-24 02:56:53 | INFO | Step 1: local_loss: 2.5953152179718018 | global_loss: 3.3984580039978027 | total_loss: 5.993773460388184\n",
      "2025-07-24 02:57:54 | INFO | Step 2: local_loss: 2.8679864406585693 | global_loss: 2.2984390258789062 | total_loss: 5.166425704956055\n",
      "2025-07-24 02:58:52 | INFO | Step 3: local_loss: 3.1181530952453613 | global_loss: 2.206371545791626 | total_loss: 5.324524879455566\n",
      "2025-07-24 02:59:36 | INFO | Step 4: local_loss: 2.777386426925659 | global_loss: 3.3101844787597656 | total_loss: 6.087571144104004\n",
      "2025-07-24 03:00:34 | INFO | Step 5: local_loss: 2.2758734226226807 | global_loss: 2.7522411346435547 | total_loss: 5.028114318847656\n",
      "2025-07-24 03:01:34 | INFO | Step 6: local_loss: 2.691118001937866 | global_loss: 3.156917095184326 | total_loss: 5.848034858703613\n",
      "2025-07-24 03:02:33 | INFO | Step 7: local_loss: 2.5632214546203613 | global_loss: 2.9797770977020264 | total_loss: 5.542998313903809\n",
      "2025-07-24 03:03:32 | INFO | Step 8: local_loss: 2.4769232273101807 | global_loss: 2.356229543685913 | total_loss: 4.833152770996094\n",
      "2025-07-24 03:04:32 | INFO | Step 9: local_loss: 2.655714750289917 | global_loss: 2.95748233795166 | total_loss: 5.613197326660156\n",
      "2025-07-24 03:05:31 | INFO | Step 10: local_loss: 2.5272789001464844 | global_loss: 3.6709883213043213 | total_loss: 6.198266983032227\n",
      "2025-07-24 03:06:30 | INFO | Step 11: local_loss: 2.878477096557617 | global_loss: 2.872835159301758 | total_loss: 5.751312255859375\n",
      "2025-07-24 03:07:28 | INFO | Step 12: local_loss: 2.8587193489074707 | global_loss: 4.330281734466553 | total_loss: 7.189001083374023\n",
      "2025-07-24 03:08:28 | INFO | Step 13: local_loss: 2.6569323539733887 | global_loss: 2.836888313293457 | total_loss: 5.493820667266846\n",
      "2025-07-24 03:09:28 | INFO | Step 14: local_loss: 2.586937189102173 | global_loss: 1.9841381311416626 | total_loss: 4.571075439453125\n",
      "2025-07-24 03:10:26 | INFO | Step 15: local_loss: 2.4551422595977783 | global_loss: 2.17154598236084 | total_loss: 4.626688003540039\n",
      "2025-07-24 03:11:25 | INFO | Step 16: local_loss: 2.669822931289673 | global_loss: 2.143998384475708 | total_loss: 4.813821315765381\n",
      "2025-07-24 03:12:24 | INFO | Step 17: local_loss: 2.2818922996520996 | global_loss: 1.0943342447280884 | total_loss: 3.3762264251708984\n",
      "2025-07-24 03:13:23 | INFO | Step 18: local_loss: 2.1122803688049316 | global_loss: 2.2357258796691895 | total_loss: 4.348006248474121\n",
      "2025-07-24 03:14:22 | INFO | Step 19: local_loss: 1.935235857963562 | global_loss: 1.4260507822036743 | total_loss: 3.3612866401672363\n",
      "2025-07-24 03:15:21 | INFO | Step 20: local_loss: 2.5843896865844727 | global_loss: 2.1248908042907715 | total_loss: 4.709280490875244\n",
      "2025-07-24 03:16:20 | INFO | Step 21: local_loss: 2.677051544189453 | global_loss: 1.562923789024353 | total_loss: 4.239975452423096\n",
      "2025-07-24 03:17:20 | INFO | Step 22: local_loss: 2.4612507820129395 | global_loss: 2.9927549362182617 | total_loss: 5.454005718231201\n",
      "2025-07-24 03:18:19 | INFO | Step 23: local_loss: 2.5035388469696045 | global_loss: 2.646270990371704 | total_loss: 5.149809837341309\n",
      "2025-07-24 03:19:17 | INFO | Step 24: local_loss: 2.6902410984039307 | global_loss: 0.5803834795951843 | total_loss: 3.2706246376037598\n",
      "2025-07-24 03:20:15 | INFO | Step 25: local_loss: 2.2868144512176514 | global_loss: 1.6814231872558594 | total_loss: 3.9682376384735107\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 03:21:14 | INFO | Step 26: local_loss: 2.221248149871826 | global_loss: 2.330010414123535 | total_loss: 4.551258563995361\n",
      "2025-07-24 03:22:13 | INFO | Step 27: local_loss: 2.286952495574951 | global_loss: 1.0001153945922852 | total_loss: 3.2870678901672363\n",
      "2025-07-24 03:23:11 | INFO | Step 28: local_loss: 2.6877377033233643 | global_loss: 1.1226003170013428 | total_loss: 3.810338020324707\n",
      "2025-07-24 03:24:09 | INFO | Step 29: local_loss: 2.7075390815734863 | global_loss: 1.1564507484436035 | total_loss: 3.86398983001709\n",
      "2025-07-24 03:25:05 | INFO | Step 30: local_loss: 3.754392623901367 | global_loss: 1.0607753992080688 | total_loss: 4.8151679039001465\n",
      "2025-07-24 03:25:50 | INFO | Step 31: local_loss: 2.5615251064300537 | global_loss: 1.5968198776245117 | total_loss: 4.1583452224731445\n",
      "2025-07-24 03:26:47 | INFO | Step 32: local_loss: 2.502265214920044 | global_loss: 1.7472000122070312 | total_loss: 4.249464988708496\n",
      "2025-07-24 03:27:48 | INFO | Step 33: local_loss: 2.2982776165008545 | global_loss: 0.8673925995826721 | total_loss: 3.165670156478882\n",
      "2025-07-24 03:28:45 | INFO | Step 34: local_loss: 2.2598233222961426 | global_loss: 1.4198508262634277 | total_loss: 3.6796741485595703\n",
      "2025-07-24 03:29:45 | INFO | Step 35: local_loss: 2.2411530017852783 | global_loss: 2.0464963912963867 | total_loss: 4.287649154663086\n",
      "2025-07-24 03:30:43 | INFO | Step 36: local_loss: 2.4560885429382324 | global_loss: 1.7393512725830078 | total_loss: 4.19543981552124\n",
      "2025-07-24 03:31:27 | INFO | Step 37: local_loss: 2.4520463943481445 | global_loss: 2.3169045448303223 | total_loss: 4.768950939178467\n",
      "2025-07-24 03:32:24 | INFO | Step 38: local_loss: 2.564577579498291 | global_loss: 0.7647625207901001 | total_loss: 3.3293399810791016\n",
      "2025-07-24 03:33:23 | INFO | Step 39: local_loss: 3.136336326599121 | global_loss: 3.1009020805358887 | total_loss: 6.23723840713501\n",
      "2025-07-24 03:34:22 | INFO | Step 40: local_loss: 2.316220760345459 | global_loss: 1.5275095701217651 | total_loss: 3.8437304496765137\n",
      "2025-07-24 03:35:21 | INFO | Step 41: local_loss: 2.4672274589538574 | global_loss: 1.826202154159546 | total_loss: 4.293429374694824\n",
      "2025-07-24 03:36:19 | INFO | Step 42: local_loss: 2.9743080139160156 | global_loss: 1.0502344369888306 | total_loss: 4.024542331695557\n",
      "2025-07-24 03:37:18 | INFO | Step 43: local_loss: 2.276174545288086 | global_loss: 0.7267432808876038 | total_loss: 3.002917766571045\n",
      "2025-07-24 03:38:15 | INFO | Step 44: local_loss: 2.1498641967773438 | global_loss: 0.842330276966095 | total_loss: 2.992194414138794\n",
      "2025-07-24 03:39:14 | INFO | Step 45: local_loss: 2.351680040359497 | global_loss: 1.6186366081237793 | total_loss: 3.9703166484832764\n",
      "2025-07-24 03:40:13 | INFO | Step 46: local_loss: 2.2236011028289795 | global_loss: 0.7669193744659424 | total_loss: 2.990520477294922\n",
      "2025-07-24 03:41:12 | INFO | Step 47: local_loss: 2.678896427154541 | global_loss: 0.7080322504043579 | total_loss: 3.3869285583496094\n",
      "2025-07-24 03:42:10 | INFO | Step 48: local_loss: 2.199631690979004 | global_loss: 1.3693878650665283 | total_loss: 3.5690195560455322\n",
      "2025-07-24 03:43:09 | INFO | Step 49: local_loss: 2.21368670463562 | global_loss: 0.5285305380821228 | total_loss: 2.7422173023223877\n",
      "2025-07-24 03:44:09 | INFO | Step 50: local_loss: 1.8906511068344116 | global_loss: 2.2628602981567383 | total_loss: 4.1535115242004395\n",
      "2025-07-24 03:45:08 | INFO | Step 51: local_loss: 2.4057180881500244 | global_loss: 1.2065519094467163 | total_loss: 3.612269878387451\n",
      "2025-07-24 03:46:07 | INFO | Step 52: local_loss: 1.8910725116729736 | global_loss: 0.3829556107521057 | total_loss: 2.2740280628204346\n",
      "2025-07-24 03:47:06 | INFO | Step 53: local_loss: 2.064807891845703 | global_loss: 2.3249661922454834 | total_loss: 4.389774322509766\n",
      "2025-07-24 03:48:06 | INFO | Step 54: local_loss: 1.809955358505249 | global_loss: 1.1082789897918701 | total_loss: 2.918234348297119\n",
      "2025-07-24 03:49:03 | INFO | Step 55: local_loss: 2.0176570415496826 | global_loss: 1.0024089813232422 | total_loss: 3.020066022872925\n",
      "2025-07-24 03:50:01 | INFO | Step 56: local_loss: 2.312378168106079 | global_loss: 0.6206380724906921 | total_loss: 2.933016300201416\n",
      "2025-07-24 03:50:59 | INFO | Step 57: local_loss: 1.8846758604049683 | global_loss: 0.8066087961196899 | total_loss: 2.691284656524658\n",
      "2025-07-24 03:51:57 | INFO | Step 58: local_loss: 2.4511659145355225 | global_loss: 0.09727184474468231 | total_loss: 2.5484378337860107\n",
      "2025-07-24 03:52:55 | INFO | Step 59: local_loss: 2.1214632987976074 | global_loss: 0.36542809009552 | total_loss: 2.486891269683838\n",
      "2025-07-24 03:53:53 | INFO | Step 60: local_loss: 2.2094435691833496 | global_loss: 0.05780282989144325 | total_loss: 2.2672464847564697\n",
      "2025-07-24 03:54:51 | INFO | Step 61: local_loss: 2.2050297260284424 | global_loss: 0.08517815917730331 | total_loss: 2.290207862854004\n",
      "2025-07-24 03:55:50 | INFO | Step 62: local_loss: 2.0522923469543457 | global_loss: 0.534437894821167 | total_loss: 2.5867302417755127\n",
      "2025-07-24 03:56:49 | INFO | Step 63: local_loss: 1.9250284433364868 | global_loss: 1.0033607482910156 | total_loss: 2.928389072418213\n",
      "2025-07-24 03:57:47 | INFO | Step 64: local_loss: 1.9534132480621338 | global_loss: 0.30517491698265076 | total_loss: 2.2585880756378174\n",
      "2025-07-24 03:58:44 | INFO | Step 65: local_loss: 2.1231400966644287 | global_loss: 0.11393154412508011 | total_loss: 2.2370717525482178\n",
      "2025-07-24 03:59:44 | INFO | Step 66: local_loss: 1.787392258644104 | global_loss: 0.6937733292579651 | total_loss: 2.481165647506714\n",
      "2025-07-24 04:00:42 | INFO | Step 67: local_loss: 1.53622305393219 | global_loss: 0.8080399036407471 | total_loss: 2.3442630767822266\n",
      "2025-07-24 04:01:41 | INFO | Step 68: local_loss: 3.781757354736328 | global_loss: 4.555537700653076 | total_loss: 8.337295532226562\n",
      "2025-07-24 04:02:39 | INFO | Step 69: local_loss: 1.8451464176177979 | global_loss: 0.21013273298740387 | total_loss: 2.05527925491333\n",
      "2025-07-24 04:03:38 | INFO | Step 70: local_loss: 2.853517770767212 | global_loss: 3.9264495372772217 | total_loss: 6.779967308044434\n",
      "2025-07-24 04:04:36 | INFO | Step 71: local_loss: 2.310481548309326 | global_loss: 0.8709697127342224 | total_loss: 3.1814513206481934\n",
      "2025-07-24 04:05:36 | INFO | Step 72: local_loss: 2.6989192962646484 | global_loss: 0.6606619358062744 | total_loss: 3.359581232070923\n",
      "2025-07-24 04:06:34 | INFO | Step 73: local_loss: 2.3829054832458496 | global_loss: 0.6622531414031982 | total_loss: 3.045158624649048\n",
      "2025-07-24 04:07:33 | INFO | Step 74: local_loss: 2.213035821914673 | global_loss: 0.7067080736160278 | total_loss: 2.9197440147399902\n",
      "2025-07-24 04:08:32 | INFO | Step 75: local_loss: 2.872302532196045 | global_loss: 0.5846644639968872 | total_loss: 3.4569668769836426\n",
      "2025-07-24 04:09:32 | INFO | Step 76: local_loss: 2.0765464305877686 | global_loss: 1.1259479522705078 | total_loss: 3.2024943828582764\n",
      "2025-07-24 04:10:16 | INFO | Step 77: local_loss: 2.923447847366333 | global_loss: 1.591383457183838 | total_loss: 4.51483154296875\n",
      "2025-07-24 04:11:13 | INFO | Step 78: local_loss: 2.4300222396850586 | global_loss: 1.3588836193084717 | total_loss: 3.7889058589935303\n",
      "2025-07-24 04:12:13 | INFO | Step 79: local_loss: 2.4357540607452393 | global_loss: 2.9023449420928955 | total_loss: 5.338099002838135\n",
      "2025-07-24 04:13:11 | INFO | Step 80: local_loss: 2.055042028427124 | global_loss: 0.9071104526519775 | total_loss: 2.9621524810791016\n",
      "2025-07-24 04:14:10 | INFO | Step 81: local_loss: 1.9490070343017578 | global_loss: 2.0779612064361572 | total_loss: 4.026968002319336\n",
      "2025-07-24 04:15:09 | INFO | Step 82: local_loss: 2.5535123348236084 | global_loss: 1.2209481000900269 | total_loss: 3.7744603157043457\n",
      "2025-07-24 04:16:07 | INFO | Step 83: local_loss: 2.0301947593688965 | global_loss: 1.0035899877548218 | total_loss: 3.033784866333008\n",
      "2025-07-24 04:17:04 | INFO | Step 84: local_loss: 2.4181909561157227 | global_loss: 1.8635176420211792 | total_loss: 4.281708717346191\n",
      "2025-07-24 04:18:02 | INFO | Step 85: local_loss: 1.9351263046264648 | global_loss: 1.428262710571289 | total_loss: 3.363389015197754\n",
      "2025-07-24 04:19:01 | INFO | Step 86: local_loss: 2.557534694671631 | global_loss: 2.4699196815490723 | total_loss: 5.027454376220703\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 04:20:00 | INFO | Step 87: local_loss: 2.4814136028289795 | global_loss: 1.314896583557129 | total_loss: 3.7963101863861084\n",
      "2025-07-24 04:20:58 | INFO | Step 88: local_loss: 2.515533924102783 | global_loss: 1.0510997772216797 | total_loss: 3.566633701324463\n",
      "2025-07-24 04:21:58 | INFO | Step 89: local_loss: 2.3591878414154053 | global_loss: 0.836196780204773 | total_loss: 3.1953845024108887\n",
      "2025-07-24 04:22:59 | INFO | Step 90: local_loss: 1.9838712215423584 | global_loss: 1.204761028289795 | total_loss: 3.1886322498321533\n",
      "2025-07-24 04:23:56 | INFO | Step 91: local_loss: 2.6277835369110107 | global_loss: 0.9459593296051025 | total_loss: 3.5737428665161133\n",
      "2025-07-24 04:24:55 | INFO | Step 92: local_loss: 2.1452486515045166 | global_loss: 1.3061189651489258 | total_loss: 3.4513676166534424\n",
      "2025-07-24 04:25:54 | INFO | Step 93: local_loss: 2.164008140563965 | global_loss: 1.5386825799942017 | total_loss: 3.702690601348877\n",
      "2025-07-24 04:26:52 | INFO | Step 94: local_loss: 1.9835718870162964 | global_loss: 1.3106555938720703 | total_loss: 3.2942276000976562\n",
      "2025-07-24 04:27:50 | INFO | Step 95: local_loss: 2.050039529800415 | global_loss: 1.1111087799072266 | total_loss: 3.1611483097076416\n",
      "2025-07-24 04:28:49 | INFO | Step 96: local_loss: 1.7928788661956787 | global_loss: 0.7899020314216614 | total_loss: 2.5827808380126953\n",
      "2025-07-24 04:29:48 | INFO | Step 97: local_loss: 1.9869431257247925 | global_loss: 1.1825594902038574 | total_loss: 3.1695027351379395\n",
      "2025-07-24 04:30:46 | INFO | Step 98: local_loss: 1.983546257019043 | global_loss: 1.1213361024856567 | total_loss: 3.10488224029541\n",
      "2025-07-24 04:31:45 | INFO | Step 99: local_loss: 1.9508132934570312 | global_loss: 1.0992099046707153 | total_loss: 3.050023078918457\n",
      "2025-07-24 04:31:45 | INFO | Center node 10280 finished at step 100 with loss 3.0500\n",
      "2025-07-24 04:31:45 | INFO | \n",
      "=== Pretraining Epoch 8 (Center Node: 4189) ===\n",
      "2025-07-24 04:32:42 | INFO | Step 0: local_loss: 3.8458573818206787 | global_loss: 3.966064691543579 | total_loss: 7.811922073364258\n",
      "2025-07-24 04:33:40 | INFO | Step 1: local_loss: 4.156486511230469 | global_loss: 4.344814777374268 | total_loss: 8.501300811767578\n",
      "2025-07-24 04:34:37 | INFO | Step 2: local_loss: 3.859175682067871 | global_loss: 3.646169662475586 | total_loss: 7.505345344543457\n",
      "2025-07-24 04:35:34 | INFO | Step 3: local_loss: 3.956698179244995 | global_loss: 4.537811756134033 | total_loss: 8.49450969696045\n",
      "2025-07-24 04:36:32 | INFO | Step 4: local_loss: 4.471311569213867 | global_loss: 3.7417993545532227 | total_loss: 8.21311092376709\n",
      "2025-07-24 04:37:30 | INFO | Step 5: local_loss: 3.8577258586883545 | global_loss: 3.9557082653045654 | total_loss: 7.81343412399292\n",
      "2025-07-24 04:38:27 | INFO | Step 6: local_loss: 3.7263317108154297 | global_loss: 2.5230815410614014 | total_loss: 6.24941349029541\n",
      "2025-07-24 04:39:25 | INFO | Step 7: local_loss: 3.929161310195923 | global_loss: 3.1756813526153564 | total_loss: 7.104842662811279\n",
      "2025-07-24 04:40:23 | INFO | Step 8: local_loss: 3.454113721847534 | global_loss: 2.065195322036743 | total_loss: 5.519309043884277\n",
      "2025-07-24 04:41:22 | INFO | Step 9: local_loss: 3.475712299346924 | global_loss: 3.40775203704834 | total_loss: 6.883464336395264\n",
      "2025-07-24 04:42:19 | INFO | Step 10: local_loss: 3.545109748840332 | global_loss: 2.932798147201538 | total_loss: 6.477908134460449\n",
      "2025-07-24 04:43:19 | INFO | Step 11: local_loss: 3.345181703567505 | global_loss: 3.2777674198150635 | total_loss: 6.622949123382568\n",
      "2025-07-24 04:44:14 | INFO | Step 12: local_loss: 2.94854998588562 | global_loss: 2.319561719894409 | total_loss: 5.268111705780029\n",
      "2025-07-24 04:45:13 | INFO | Step 13: local_loss: 2.287555456161499 | global_loss: 2.814120292663574 | total_loss: 5.101675987243652\n",
      "2025-07-24 04:46:11 | INFO | Step 14: local_loss: 2.731081247329712 | global_loss: 1.7956840991973877 | total_loss: 4.5267653465271\n",
      "2025-07-24 04:47:10 | INFO | Step 15: local_loss: 2.54358172416687 | global_loss: 1.234804630279541 | total_loss: 3.778386354446411\n",
      "2025-07-24 04:48:07 | INFO | Step 16: local_loss: 2.372361183166504 | global_loss: 2.010671615600586 | total_loss: 4.38303279876709\n",
      "2025-07-24 04:49:04 | INFO | Step 17: local_loss: 2.541006565093994 | global_loss: 0.9403076171875 | total_loss: 3.481314182281494\n",
      "2025-07-24 04:50:01 | INFO | Step 18: local_loss: 3.0139424800872803 | global_loss: 1.6304311752319336 | total_loss: 4.644373893737793\n",
      "2025-07-24 04:50:58 | INFO | Step 19: local_loss: 2.909637451171875 | global_loss: 0.6985054612159729 | total_loss: 3.608142852783203\n",
      "2025-07-24 04:51:57 | INFO | Step 20: local_loss: 4.485131740570068 | global_loss: 5.330600261688232 | total_loss: 9.8157320022583\n",
      "2025-07-24 04:52:55 | INFO | Step 21: local_loss: 2.828524112701416 | global_loss: 1.3230804204940796 | total_loss: 4.151604652404785\n",
      "2025-07-24 04:53:52 | INFO | Step 22: local_loss: 3.285313606262207 | global_loss: 0.987586498260498 | total_loss: 4.272900104522705\n",
      "2025-07-24 04:54:51 | INFO | Step 23: local_loss: 2.768634080886841 | global_loss: 1.0857199430465698 | total_loss: 3.854353904724121\n",
      "2025-07-24 04:55:49 | INFO | Step 24: local_loss: 2.950971841812134 | global_loss: 1.7368671894073486 | total_loss: 4.687839031219482\n",
      "2025-07-24 04:56:48 | INFO | Step 25: local_loss: 3.074105978012085 | global_loss: 1.597968578338623 | total_loss: 4.672074317932129\n",
      "2025-07-24 04:57:45 | INFO | Step 26: local_loss: 2.6558144092559814 | global_loss: 1.6958814859390259 | total_loss: 4.351696014404297\n",
      "2025-07-24 04:58:43 | INFO | Step 27: local_loss: 2.794707775115967 | global_loss: 2.8690786361694336 | total_loss: 5.6637864112854\n",
      "2025-07-24 04:59:40 | INFO | Step 28: local_loss: 2.3674309253692627 | global_loss: 1.5483006238937378 | total_loss: 3.915731430053711\n",
      "2025-07-24 05:00:38 | INFO | Step 29: local_loss: 2.5299291610717773 | global_loss: 1.1667343378067017 | total_loss: 3.6966633796691895\n",
      "2025-07-24 05:01:36 | INFO | Step 30: local_loss: 2.675006628036499 | global_loss: 1.4687389135360718 | total_loss: 4.143745422363281\n",
      "2025-07-24 05:02:35 | INFO | Step 31: local_loss: 2.2640295028686523 | global_loss: 1.8587863445281982 | total_loss: 4.12281608581543\n",
      "2025-07-24 05:03:33 | INFO | Step 32: local_loss: 1.8253095149993896 | global_loss: 0.8727644681930542 | total_loss: 2.6980738639831543\n",
      "2025-07-24 05:04:30 | INFO | Step 33: local_loss: 1.993915319442749 | global_loss: 2.805872917175293 | total_loss: 4.799788475036621\n",
      "2025-07-24 05:05:28 | INFO | Step 34: local_loss: 1.9253737926483154 | global_loss: 1.8337304592132568 | total_loss: 3.7591042518615723\n",
      "2025-07-24 05:06:24 | INFO | Step 35: local_loss: 1.9488369226455688 | global_loss: 0.8926564455032349 | total_loss: 2.8414933681488037\n",
      "2025-07-24 05:07:22 | INFO | Step 36: local_loss: 2.075000524520874 | global_loss: 0.7355043888092041 | total_loss: 2.810504913330078\n",
      "2025-07-24 05:08:19 | INFO | Step 37: local_loss: 2.3496994972229004 | global_loss: 0.2777863144874573 | total_loss: 2.627485752105713\n",
      "2025-07-24 05:09:17 | INFO | Step 38: local_loss: 2.142895221710205 | global_loss: 0.35308825969696045 | total_loss: 2.495983600616455\n",
      "2025-07-24 05:10:14 | INFO | Step 39: local_loss: 1.5637707710266113 | global_loss: 0.1506594717502594 | total_loss: 1.7144302129745483\n",
      "2025-07-24 05:11:12 | INFO | Step 40: local_loss: 2.390192747116089 | global_loss: 0.9286736845970154 | total_loss: 3.318866491317749\n",
      "2025-07-24 05:12:11 | INFO | Step 41: local_loss: 2.631079912185669 | global_loss: 0.7973464727401733 | total_loss: 3.4284262657165527\n",
      "2025-07-24 05:13:09 | INFO | Step 42: local_loss: 1.761849045753479 | global_loss: 1.333217978477478 | total_loss: 3.095067024230957\n",
      "2025-07-24 05:14:08 | INFO | Step 43: local_loss: 2.24463152885437 | global_loss: 1.069347620010376 | total_loss: 3.313979148864746\n",
      "2025-07-24 05:15:05 | INFO | Step 44: local_loss: 2.0589423179626465 | global_loss: 1.1141107082366943 | total_loss: 3.173053026199341\n",
      "2025-07-24 05:16:04 | INFO | Step 45: local_loss: 1.6069649457931519 | global_loss: 0.09318561851978302 | total_loss: 1.7001506090164185\n",
      "2025-07-24 05:17:01 | INFO | Step 46: local_loss: 2.0831210613250732 | global_loss: 1.3914726972579956 | total_loss: 3.4745936393737793\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 05:18:00 | INFO | Step 47: local_loss: 2.2261805534362793 | global_loss: 0.8220426440238953 | total_loss: 3.0482232570648193\n",
      "2025-07-24 05:18:57 | INFO | Step 48: local_loss: 1.877734661102295 | global_loss: 0.6778289675712585 | total_loss: 2.5555636882781982\n",
      "2025-07-24 05:19:41 | INFO | Step 49: local_loss: 2.676534414291382 | global_loss: 0.3349834680557251 | total_loss: 3.0115180015563965\n",
      "2025-07-24 05:20:37 | INFO | Step 50: local_loss: 1.9981615543365479 | global_loss: 0.3142927885055542 | total_loss: 2.3124542236328125\n",
      "2025-07-24 05:21:33 | INFO | Step 51: local_loss: 1.968103289604187 | global_loss: 0.061827950179576874 | total_loss: 2.0299313068389893\n",
      "2025-07-24 05:22:32 | INFO | Step 52: local_loss: 2.7399661540985107 | global_loss: 0.18494385480880737 | total_loss: 2.924910068511963\n",
      "2025-07-24 05:23:30 | INFO | Step 53: local_loss: 1.7683160305023193 | global_loss: 0.0003032981767319143 | total_loss: 1.7686192989349365\n",
      "2025-07-24 05:24:28 | INFO | Step 54: local_loss: 1.9605761766433716 | global_loss: 0.04433406889438629 | total_loss: 2.0049102306365967\n",
      "2025-07-24 05:25:27 | INFO | Step 55: local_loss: 1.9782699346542358 | global_loss: 0.13319459557533264 | total_loss: 2.111464500427246\n",
      "2025-07-24 05:26:23 | INFO | Step 56: local_loss: 2.7611711025238037 | global_loss: 1.2551254034042358 | total_loss: 4.01629638671875\n",
      "2025-07-24 05:27:07 | INFO | Step 57: local_loss: 2.2935898303985596 | global_loss: 0.12451256066560745 | total_loss: 2.418102502822876\n",
      "2025-07-24 05:28:03 | INFO | Step 58: local_loss: 1.779762864112854 | global_loss: 0.49032238125801086 | total_loss: 2.270085334777832\n",
      "2025-07-24 05:29:02 | INFO | Step 59: local_loss: 1.910400390625 | global_loss: 0.524566650390625 | total_loss: 2.434967041015625\n",
      "2025-07-24 05:29:59 | INFO | Step 60: local_loss: 2.3118693828582764 | global_loss: 0.0808543860912323 | total_loss: 2.392723798751831\n",
      "2025-07-24 05:30:58 | INFO | Step 61: local_loss: 1.854453444480896 | global_loss: 0.04478995129466057 | total_loss: 1.8992433547973633\n",
      "2025-07-24 05:31:56 | INFO | Step 62: local_loss: 1.9920634031295776 | global_loss: 0.19837871193885803 | total_loss: 2.1904420852661133\n",
      "2025-07-24 05:32:54 | INFO | Step 63: local_loss: 2.1340129375457764 | global_loss: 0.685924768447876 | total_loss: 2.8199377059936523\n",
      "2025-07-24 05:33:52 | INFO | Step 64: local_loss: 1.7360515594482422 | global_loss: 0.08924601227045059 | total_loss: 1.8252975940704346\n",
      "2025-07-24 05:34:50 | INFO | Step 65: local_loss: 2.2949326038360596 | global_loss: 0.34405314922332764 | total_loss: 2.6389856338500977\n",
      "2025-07-24 05:35:48 | INFO | Step 66: local_loss: 2.049804925918579 | global_loss: 0.2935250401496887 | total_loss: 2.343329906463623\n",
      "2025-07-24 05:36:45 | INFO | Step 67: local_loss: 1.87906014919281 | global_loss: 0.015888195484876633 | total_loss: 1.8949483633041382\n",
      "2025-07-24 05:37:43 | INFO | Step 68: local_loss: 2.3643672466278076 | global_loss: 0.0921780914068222 | total_loss: 2.456545352935791\n",
      "2025-07-24 05:38:42 | INFO | Step 69: local_loss: 1.7853612899780273 | global_loss: 0.011584016494452953 | total_loss: 1.796945333480835\n",
      "2025-07-24 05:39:40 | INFO | Step 70: local_loss: 2.1551642417907715 | global_loss: 0.8369420170783997 | total_loss: 2.9921061992645264\n",
      "2025-07-24 05:40:38 | INFO | Step 71: local_loss: 1.679565668106079 | global_loss: 0.2750932574272156 | total_loss: 1.9546589851379395\n",
      "2025-07-24 05:41:35 | INFO | Step 72: local_loss: 1.942734956741333 | global_loss: 0.6691862344741821 | total_loss: 2.6119213104248047\n",
      "2025-07-24 05:42:33 | INFO | Step 73: local_loss: 1.4086501598358154 | global_loss: 0.02002369612455368 | total_loss: 1.4286738634109497\n",
      "2025-07-24 05:43:30 | INFO | Step 74: local_loss: 1.675201177597046 | global_loss: 0.31353050470352173 | total_loss: 1.9887316226959229\n",
      "2025-07-24 05:44:28 | INFO | Step 75: local_loss: 1.8390954732894897 | global_loss: 0.12005569040775299 | total_loss: 1.9591511487960815\n",
      "2025-07-24 05:45:25 | INFO | Step 76: local_loss: 1.875213861465454 | global_loss: 0.36071521043777466 | total_loss: 2.235929012298584\n",
      "2025-07-24 05:46:23 | INFO | Step 77: local_loss: 2.2493913173675537 | global_loss: 0.09915625303983688 | total_loss: 2.3485474586486816\n",
      "2025-07-24 05:47:21 | INFO | Step 78: local_loss: 1.6325390338897705 | global_loss: 0.03761528432369232 | total_loss: 1.670154333114624\n",
      "2025-07-24 05:48:20 | INFO | Step 79: local_loss: 2.1902284622192383 | global_loss: 0.2408466339111328 | total_loss: 2.431075096130371\n",
      "2025-07-24 05:49:18 | INFO | Step 80: local_loss: 1.9994392395019531 | global_loss: 0.013387436047196388 | total_loss: 2.012826681137085\n",
      "2025-07-24 05:50:14 | INFO | Step 81: local_loss: 1.9856817722320557 | global_loss: 6.335727084660903e-05 | total_loss: 1.9857450723648071\n",
      "2025-07-24 05:51:11 | INFO | Step 82: local_loss: 1.7585070133209229 | global_loss: 0.01590418815612793 | total_loss: 1.7744112014770508\n",
      "2025-07-24 05:52:09 | INFO | Step 83: local_loss: 1.6418333053588867 | global_loss: 0.22195972502231598 | total_loss: 1.8637930154800415\n",
      "2025-07-24 05:53:06 | INFO | Step 84: local_loss: 1.8100179433822632 | global_loss: 1.3633613586425781 | total_loss: 3.173379421234131\n",
      "2025-07-24 05:53:50 | INFO | Step 85: local_loss: 1.833744764328003 | global_loss: 0.0813981220126152 | total_loss: 1.9151428937911987\n",
      "2025-07-24 05:54:46 | INFO | Step 86: local_loss: 1.6101248264312744 | global_loss: 0.06715060025453568 | total_loss: 1.6772754192352295\n",
      "2025-07-24 05:55:45 | INFO | Step 87: local_loss: 1.488545298576355 | global_loss: 0.47209975123405457 | total_loss: 1.960645079612732\n",
      "2025-07-24 05:56:42 | INFO | Step 88: local_loss: 2.102632999420166 | global_loss: 0.1586628258228302 | total_loss: 2.261295795440674\n",
      "2025-07-24 05:57:40 | INFO | Step 89: local_loss: 1.9162726402282715 | global_loss: 0.03375859186053276 | total_loss: 1.9500312805175781\n",
      "2025-07-24 05:58:39 | INFO | Step 90: local_loss: 2.0858685970306396 | global_loss: 0.46724262833595276 | total_loss: 2.5531113147735596\n",
      "2025-07-24 05:59:37 | INFO | Step 91: local_loss: 1.4755828380584717 | global_loss: 0.5382872223854065 | total_loss: 2.0138700008392334\n",
      "2025-07-24 06:00:35 | INFO | Step 92: local_loss: 1.6197493076324463 | global_loss: 0.052180688828229904 | total_loss: 1.671929955482483\n",
      "2025-07-24 06:01:34 | INFO | Step 93: local_loss: 1.7909352779388428 | global_loss: 1.4719102382659912 | total_loss: 3.262845516204834\n",
      "2025-07-24 06:02:32 | INFO | Step 94: local_loss: 1.8227670192718506 | global_loss: 0.34949496388435364 | total_loss: 2.172261953353882\n",
      "2025-07-24 06:03:30 | INFO | Step 95: local_loss: 2.6455626487731934 | global_loss: 0.002603198168799281 | total_loss: 2.6481659412384033\n",
      "2025-07-24 06:04:29 | INFO | Step 96: local_loss: 1.6064138412475586 | global_loss: 0.31948497891426086 | total_loss: 1.925898790359497\n",
      "2025-07-24 06:05:28 | INFO | Step 97: local_loss: 1.6519851684570312 | global_loss: 0.07309409230947495 | total_loss: 1.7250792980194092\n",
      "2025-07-24 06:06:26 | INFO | Step 98: local_loss: 1.6838605403900146 | global_loss: 0.03562053292989731 | total_loss: 1.719481110572815\n",
      "2025-07-24 06:07:24 | INFO | Step 99: local_loss: 1.8640143871307373 | global_loss: 0.1520858258008957 | total_loss: 2.0161001682281494\n",
      "2025-07-24 06:07:24 | INFO | Center node 4189 finished at step 100 with loss 2.0161\n",
      "2025-07-24 06:07:24 | INFO | \n",
      "=== Pretraining Epoch 9 (Center Node: 3682) ===\n",
      "2025-07-24 06:08:25 | INFO | Step 0: local_loss: 4.438149452209473 | global_loss: 4.783164978027344 | total_loss: 9.221314430236816\n",
      "2025-07-24 06:09:26 | INFO | Step 1: local_loss: 4.320901870727539 | global_loss: 5.835484027862549 | total_loss: 10.15638542175293\n",
      "2025-07-24 06:10:28 | INFO | Step 2: local_loss: 3.989332914352417 | global_loss: 4.836910247802734 | total_loss: 8.82624340057373\n",
      "2025-07-24 06:11:15 | INFO | Step 3: local_loss: 5.026483058929443 | global_loss: 3.4505226612091064 | total_loss: 8.477005958557129\n",
      "2025-07-24 06:12:14 | INFO | Step 4: local_loss: 3.953991651535034 | global_loss: 4.213409900665283 | total_loss: 8.167401313781738\n",
      "2025-07-24 06:13:16 | INFO | Step 5: local_loss: 3.9721269607543945 | global_loss: 4.822309494018555 | total_loss: 8.79443645477295\n",
      "2025-07-24 06:14:18 | INFO | Step 6: local_loss: 4.112691402435303 | global_loss: 3.3851571083068848 | total_loss: 7.4978485107421875\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 06:15:19 | INFO | Step 7: local_loss: 3.8505361080169678 | global_loss: 4.0211968421936035 | total_loss: 7.871732711791992\n",
      "2025-07-24 06:16:20 | INFO | Step 8: local_loss: 4.274524211883545 | global_loss: 3.9627768993377686 | total_loss: 8.237300872802734\n",
      "2025-07-24 06:17:06 | INFO | Step 9: local_loss: 4.353697299957275 | global_loss: 4.600160598754883 | total_loss: 8.953857421875\n",
      "2025-07-24 06:18:06 | INFO | Step 10: local_loss: 4.11422872543335 | global_loss: 4.1604228019714355 | total_loss: 8.274651527404785\n",
      "2025-07-24 06:19:08 | INFO | Step 11: local_loss: 4.369743347167969 | global_loss: 4.6462507247924805 | total_loss: 9.01599407196045\n",
      "2025-07-24 06:20:08 | INFO | Step 12: local_loss: 3.9254045486450195 | global_loss: 3.087550401687622 | total_loss: 7.0129547119140625\n",
      "2025-07-24 06:21:10 | INFO | Step 13: local_loss: 4.083858489990234 | global_loss: 3.7831554412841797 | total_loss: 7.867013931274414\n",
      "2025-07-24 06:22:11 | INFO | Step 14: local_loss: 4.31463098526001 | global_loss: 4.098606586456299 | total_loss: 8.413237571716309\n",
      "2025-07-24 06:23:13 | INFO | Step 15: local_loss: 3.772672653198242 | global_loss: 3.781048059463501 | total_loss: 7.553720474243164\n",
      "2025-07-24 06:24:15 | INFO | Step 16: local_loss: 4.74336576461792 | global_loss: 3.286625623703003 | total_loss: 8.029991149902344\n",
      "2025-07-24 06:25:14 | INFO | Step 17: local_loss: 4.121752738952637 | global_loss: 3.493635416030884 | total_loss: 7.615387916564941\n",
      "2025-07-24 06:26:17 | INFO | Step 18: local_loss: 3.874844789505005 | global_loss: 3.5990960597991943 | total_loss: 7.473940849304199\n",
      "2025-07-24 06:27:17 | INFO | Step 19: local_loss: 4.290085315704346 | global_loss: 4.45556116104126 | total_loss: 8.745646476745605\n",
      "2025-07-24 06:28:18 | INFO | Step 20: local_loss: 3.893855094909668 | global_loss: 3.9130618572235107 | total_loss: 7.806917190551758\n",
      "2025-07-24 06:29:21 | INFO | Step 21: local_loss: 4.189300060272217 | global_loss: 4.360490798950195 | total_loss: 8.54979133605957\n",
      "2025-07-24 06:30:22 | INFO | Step 22: local_loss: 3.9141900539398193 | global_loss: 4.582600116729736 | total_loss: 8.496789932250977\n",
      "2025-07-24 06:31:24 | INFO | Step 23: local_loss: 3.9098734855651855 | global_loss: 2.863410234451294 | total_loss: 6.773283958435059\n",
      "2025-07-24 06:32:25 | INFO | Step 24: local_loss: 4.632506370544434 | global_loss: 2.954890012741089 | total_loss: 7.587396621704102\n",
      "2025-07-24 06:33:27 | INFO | Step 25: local_loss: 3.82377552986145 | global_loss: 3.70015287399292 | total_loss: 7.523928642272949\n",
      "2025-07-24 06:34:28 | INFO | Step 26: local_loss: 3.6549999713897705 | global_loss: 4.209250450134277 | total_loss: 7.864250183105469\n",
      "2025-07-24 06:35:28 | INFO | Step 27: local_loss: 4.042776584625244 | global_loss: 3.9606471061706543 | total_loss: 8.003423690795898\n",
      "2025-07-24 06:36:30 | INFO | Step 28: local_loss: 3.076880693435669 | global_loss: 3.3940019607543945 | total_loss: 6.470882415771484\n",
      "2025-07-24 06:37:31 | INFO | Step 29: local_loss: 3.9241650104522705 | global_loss: 3.454007625579834 | total_loss: 7.378172874450684\n",
      "2025-07-24 06:38:31 | INFO | Step 30: local_loss: 3.7174458503723145 | global_loss: 4.087993621826172 | total_loss: 7.805439472198486\n",
      "2025-07-24 06:39:31 | INFO | Step 31: local_loss: 3.992877960205078 | global_loss: 4.160595893859863 | total_loss: 8.153473854064941\n",
      "2025-07-24 06:40:32 | INFO | Step 32: local_loss: 3.680553436279297 | global_loss: 3.8783674240112305 | total_loss: 7.558920860290527\n",
      "2025-07-24 06:41:34 | INFO | Step 33: local_loss: 4.336645126342773 | global_loss: 2.840209484100342 | total_loss: 7.176854610443115\n",
      "2025-07-24 06:42:35 | INFO | Step 34: local_loss: 3.4345006942749023 | global_loss: 2.9592363834381104 | total_loss: 6.393736839294434\n",
      "2025-07-24 06:43:37 | INFO | Step 35: local_loss: 3.4759674072265625 | global_loss: 4.5224761962890625 | total_loss: 7.998443603515625\n",
      "2025-07-24 06:44:39 | INFO | Step 36: local_loss: 2.7221004962921143 | global_loss: 2.4865806102752686 | total_loss: 5.208681106567383\n",
      "2025-07-24 06:45:41 | INFO | Step 37: local_loss: 3.0049736499786377 | global_loss: 2.6984591484069824 | total_loss: 5.703433036804199\n",
      "2025-07-24 06:46:41 | INFO | Step 38: local_loss: 1.5920249223709106 | global_loss: 1.6326810121536255 | total_loss: 3.224705934524536\n",
      "2025-07-24 06:47:42 | INFO | Step 39: local_loss: 1.6843950748443604 | global_loss: 0.9948193430900574 | total_loss: 2.6792144775390625\n",
      "2025-07-24 06:48:43 | INFO | Step 40: local_loss: 1.981067180633545 | global_loss: 1.4918875694274902 | total_loss: 3.472954750061035\n",
      "2025-07-24 06:49:43 | INFO | Step 41: local_loss: 3.0045835971832275 | global_loss: 2.5822482109069824 | total_loss: 5.586832046508789\n",
      "2025-07-24 06:50:45 | INFO | Step 42: local_loss: 1.4287692308425903 | global_loss: 0.9561545252799988 | total_loss: 2.3849236965179443\n",
      "2025-07-24 06:51:47 | INFO | Step 43: local_loss: 1.056823968887329 | global_loss: 1.3471474647521973 | total_loss: 2.4039714336395264\n",
      "2025-07-24 06:52:47 | INFO | Step 44: local_loss: 1.3416906595230103 | global_loss: 1.1432644128799438 | total_loss: 2.484955072402954\n",
      "2025-07-24 06:53:48 | INFO | Step 45: local_loss: 1.1498801708221436 | global_loss: 0.8818310499191284 | total_loss: 2.0317111015319824\n",
      "2025-07-24 06:54:49 | INFO | Step 46: local_loss: 1.3365572690963745 | global_loss: 0.00046426826156675816 | total_loss: 1.3370215892791748\n",
      "2025-07-24 06:55:51 | INFO | Step 47: local_loss: 1.324241042137146 | global_loss: 0.9762712717056274 | total_loss: 2.3005123138427734\n",
      "2025-07-24 06:56:53 | INFO | Step 48: local_loss: 1.0006530284881592 | global_loss: 0.08988428115844727 | total_loss: 1.0905373096466064\n",
      "2025-07-24 06:57:54 | INFO | Step 49: local_loss: 1.2926231622695923 | global_loss: 0.30758896470069885 | total_loss: 1.6002120971679688\n",
      "2025-07-24 06:58:56 | INFO | Step 50: local_loss: 0.8752410411834717 | global_loss: 0.7338160276412964 | total_loss: 1.609057068824768\n",
      "2025-07-24 06:59:56 | INFO | Step 51: local_loss: 1.1099607944488525 | global_loss: 0.06563372910022736 | total_loss: 1.1755945682525635\n",
      "2025-07-24 07:00:57 | INFO | Step 52: local_loss: 1.0541956424713135 | global_loss: 0.346554696559906 | total_loss: 1.4007503986358643\n",
      "2025-07-24 07:01:44 | INFO | Step 53: local_loss: 0.9611340165138245 | global_loss: 0.16182279586791992 | total_loss: 1.1229567527770996\n",
      "2025-07-24 07:02:42 | INFO | Step 54: local_loss: 1.4284902811050415 | global_loss: 0.25931259989738464 | total_loss: 1.6878029108047485\n",
      "2025-07-24 07:03:44 | INFO | Step 55: local_loss: 1.0679187774658203 | global_loss: 0.31804484128952026 | total_loss: 1.3859636783599854\n",
      "2025-07-24 07:04:45 | INFO | Step 56: local_loss: 1.0545090436935425 | global_loss: 0.8524681925773621 | total_loss: 1.9069771766662598\n",
      "2025-07-24 07:05:47 | INFO | Step 57: local_loss: 1.0435869693756104 | global_loss: 0.20920589566230774 | total_loss: 1.2527928352355957\n",
      "2025-07-24 07:06:50 | INFO | Step 58: local_loss: 1.175647497177124 | global_loss: 0.6766519546508789 | total_loss: 1.852299451828003\n",
      "2025-07-24 07:07:37 | INFO | Step 59: local_loss: 1.0025502443313599 | global_loss: 0.3814927339553833 | total_loss: 1.3840429782867432\n",
      "2025-07-24 07:08:37 | INFO | Step 60: local_loss: 1.2794960737228394 | global_loss: 0.06094011664390564 | total_loss: 1.3404362201690674\n",
      "2025-07-24 07:09:40 | INFO | Step 61: local_loss: 1.1366939544677734 | global_loss: 0.14629407227039337 | total_loss: 1.2829880714416504\n",
      "2025-07-24 07:10:41 | INFO | Step 62: local_loss: 1.1884901523590088 | global_loss: 0.19108493626117706 | total_loss: 1.3795751333236694\n",
      "2025-07-24 07:11:28 | INFO | Step 63: local_loss: 0.9372677206993103 | global_loss: 0.6232126355171204 | total_loss: 1.5604803562164307\n",
      "2025-07-24 07:12:27 | INFO | Step 64: local_loss: 1.563581109046936 | global_loss: 0.0010768512729555368 | total_loss: 1.5646579265594482\n",
      "2025-07-24 07:13:27 | INFO | Step 65: local_loss: 0.7379629611968994 | global_loss: 0.2709638178348541 | total_loss: 1.0089267492294312\n",
      "2025-07-24 07:14:29 | INFO | Step 66: local_loss: 1.503283977508545 | global_loss: 0.002909275470301509 | total_loss: 1.5061932802200317\n",
      "2025-07-24 07:15:30 | INFO | Step 67: local_loss: 1.2183301448822021 | global_loss: 1.0119351148605347 | total_loss: 2.2302651405334473\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 07:16:31 | INFO | Step 68: local_loss: 1.1047347784042358 | global_loss: 0.0561361126601696 | total_loss: 1.160870909690857\n",
      "2025-07-24 07:17:31 | INFO | Step 69: local_loss: 1.0608742237091064 | global_loss: 0.5606207251548767 | total_loss: 1.621495008468628\n",
      "2025-07-24 07:18:33 | INFO | Step 70: local_loss: 0.9419202208518982 | global_loss: 0.15120413899421692 | total_loss: 1.0931243896484375\n",
      "2025-07-24 07:19:36 | INFO | Step 71: local_loss: 0.8973579406738281 | global_loss: 0.2979258596897125 | total_loss: 1.1952837705612183\n",
      "2025-07-24 07:20:37 | INFO | Step 72: local_loss: 1.2892512083053589 | global_loss: 0.20830458402633667 | total_loss: 1.4975557327270508\n",
      "2025-07-24 07:21:24 | INFO | Step 73: local_loss: 0.994907796382904 | global_loss: 0.01005303394049406 | total_loss: 1.0049607753753662\n",
      "2025-07-24 07:22:23 | INFO | Step 74: local_loss: 1.2132437229156494 | global_loss: 0.01216976996511221 | total_loss: 1.22541344165802\n",
      "2025-07-24 07:23:10 | INFO | Step 75: local_loss: 0.800929844379425 | global_loss: 0.3344041407108307 | total_loss: 1.1353340148925781\n",
      "2025-07-24 07:24:09 | INFO | Step 76: local_loss: 0.8038943409919739 | global_loss: 0.42093828320503235 | total_loss: 1.2248326539993286\n",
      "2025-07-24 07:25:11 | INFO | Step 77: local_loss: 0.6199028491973877 | global_loss: 0.4853511452674866 | total_loss: 1.1052539348602295\n",
      "2025-07-24 07:26:11 | INFO | Step 78: local_loss: 0.7710093259811401 | global_loss: 0.7108995318412781 | total_loss: 1.4819087982177734\n",
      "2025-07-24 07:27:14 | INFO | Step 79: local_loss: 0.9465110301971436 | global_loss: 0.02316991612315178 | total_loss: 0.9696809649467468\n",
      "2025-07-24 07:28:14 | INFO | Step 80: local_loss: 1.0048152208328247 | global_loss: 0.0011361290235072374 | total_loss: 1.0059514045715332\n",
      "2025-07-24 07:29:16 | INFO | Step 81: local_loss: 0.8425077795982361 | global_loss: 0.14423218369483948 | total_loss: 0.986739993095398\n",
      "2025-07-24 07:30:17 | INFO | Step 82: local_loss: 1.027976155281067 | global_loss: 0.3113739490509033 | total_loss: 1.3393501043319702\n",
      "2025-07-24 07:31:17 | INFO | Step 83: local_loss: 0.8703215718269348 | global_loss: 0.10704866051673889 | total_loss: 0.9773702621459961\n",
      "2025-07-24 07:32:19 | INFO | Step 84: local_loss: 0.7587370276451111 | global_loss: 0.7026722431182861 | total_loss: 1.461409330368042\n",
      "2025-07-24 07:33:20 | INFO | Step 85: local_loss: 0.8096461892127991 | global_loss: 0.09315210580825806 | total_loss: 0.9027982950210571\n",
      "2025-07-24 07:34:21 | INFO | Step 86: local_loss: 0.9185685515403748 | global_loss: 0.3056861162185669 | total_loss: 1.2242546081542969\n",
      "2025-07-24 07:35:23 | INFO | Step 87: local_loss: 0.8640953898429871 | global_loss: 0.8801237344741821 | total_loss: 1.7442190647125244\n",
      "2025-07-24 07:36:23 | INFO | Step 88: local_loss: 0.9406142234802246 | global_loss: 0.06735949218273163 | total_loss: 1.0079736709594727\n",
      "2025-07-24 07:37:25 | INFO | Step 89: local_loss: 0.8027146458625793 | global_loss: 0.20532983541488647 | total_loss: 1.0080444812774658\n",
      "2025-07-24 07:38:28 | INFO | Step 90: local_loss: 0.932776153087616 | global_loss: 0.11307108402252197 | total_loss: 1.0458471775054932\n",
      "2025-07-24 07:39:30 | INFO | Step 91: local_loss: 0.7145669460296631 | global_loss: 0.2815840542316437 | total_loss: 0.9961509704589844\n",
      "2025-07-24 07:40:32 | INFO | Step 92: local_loss: 0.7547001242637634 | global_loss: 0.41690999269485474 | total_loss: 1.1716101169586182\n",
      "2025-07-24 07:41:34 | INFO | Step 93: local_loss: 1.2892190217971802 | global_loss: 0.05368261784315109 | total_loss: 1.342901587486267\n",
      "2025-07-24 07:42:35 | INFO | Step 94: local_loss: 0.8247535228729248 | global_loss: 1.0169721841812134 | total_loss: 1.8417257070541382\n",
      "2025-07-24 07:43:37 | INFO | Step 95: local_loss: 1.1701011657714844 | global_loss: 0.17447569966316223 | total_loss: 1.3445768356323242\n",
      "2025-07-24 07:44:38 | INFO | Step 96: local_loss: 0.9744789600372314 | global_loss: 0.02125806361436844 | total_loss: 0.9957370162010193\n",
      "2025-07-24 07:45:41 | INFO | Step 97: local_loss: 0.9513633251190186 | global_loss: 0.22763477265834808 | total_loss: 1.1789981126785278\n",
      "2025-07-24 07:46:43 | INFO | Step 98: local_loss: 1.081587314605713 | global_loss: 0.08772115409374237 | total_loss: 1.1693084239959717\n",
      "2025-07-24 07:47:45 | INFO | Step 99: local_loss: 0.946934163570404 | global_loss: 0.26430943608283997 | total_loss: 1.2112436294555664\n",
      "2025-07-24 07:47:45 | INFO | Center node 3682 finished at step 100 with loss 1.2112\n",
      "2025-07-24 07:47:45 | INFO | \n",
      "=== Starting Fine-tuning ===\n",
      "2025-07-24 07:58:19 | INFO | Epoch: 000 | Loss: 0.0903 | Val AUC: 0.6857 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 08:08:59 | INFO | Epoch: 005 | Loss: 0.1281 | Val AUC: 0.7077 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 08:19:40 | INFO | Epoch: 010 | Loss: 0.1591 | Val AUC: 0.7574 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 08:30:20 | INFO | Epoch: 015 | Loss: 0.0038 | Val AUC: 0.7698 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 08:41:00 | INFO | Epoch: 020 | Loss: 0.0378 | Val AUC: 0.7730 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 08:51:40 | INFO | Epoch: 025 | Loss: 0.0305 | Val AUC: 0.7794 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 09:02:21 | INFO | Epoch: 030 | Loss: 0.0251 | Val AUC: 0.7885 | Val F1: 0.4749 | Val GMean: 0.0000\n",
      "2025-07-24 09:13:01 | INFO | Epoch: 035 | Loss: 0.1293 | Val AUC: 0.7961 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 09:23:42 | INFO | Epoch: 040 | Loss: 0.0019 | Val AUC: 0.7949 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 09:34:23 | INFO | Epoch: 045 | Loss: 0.0058 | Val AUC: 0.7962 | Val F1: 0.4780 | Val GMean: 0.0589\n",
      "2025-07-24 09:45:03 | INFO | Epoch: 050 | Loss: 0.0229 | Val AUC: 0.7992 | Val F1: 0.4983 | Val GMean: 0.1557\n",
      "2025-07-24 09:55:43 | INFO | Epoch: 055 | Loss: 0.1624 | Val AUC: 0.8003 | Val F1: 0.5411 | Val GMean: 0.3683\n",
      "2025-07-24 10:06:24 | INFO | Epoch: 060 | Loss: 0.0239 | Val AUC: 0.8160 | Val F1: 0.6509 | Val GMean: 0.6176\n",
      "2025-07-24 10:17:05 | INFO | Epoch: 065 | Loss: 0.0538 | Val AUC: 0.8410 | Val F1: 0.7169 | Val GMean: 0.6985\n",
      "2025-07-24 10:27:46 | INFO | Epoch: 070 | Loss: 0.0205 | Val AUC: 0.8501 | Val F1: 0.7497 | Val GMean: 0.6985\n",
      "2025-07-24 10:38:26 | INFO | Epoch: 075 | Loss: 0.0585 | Val AUC: 0.8547 | Val F1: 0.7536 | Val GMean: 0.7076\n",
      "2025-07-24 10:47:35 | INFO | \n",
      "=== Final Test Results ===\n",
      "2025-07-24 10:47:35 | INFO | Test AUC: 0.8340 | Test AP: 0.4748 | Test F1: 0.7235 | G-mean: 0.6786\n"
     ]
    }
   ],
   "source": [
    "\n",
    "\n",
    "# Hyper-parameter settings (pretrain + fine-tune run)\n",
    "args = {\n",
    "    \"dataset\": \"amazon\",\n",
    "    #     \"dataset\": \"yelp\",\n",
    "    \"weight_decay\": 0.00005,\n",
    "    \"seed\": 76,\n",
    "    # pretraining parameters\n",
    "    \"pretrain_epochs\": 10,\n",
    "    \"max_steps\": 100,\n",
    "    \"sample_size\": 100,\n",
    "    \"loss_threshold\": 0.1,\n",
    "    \"pretrain_lr\": 0.0015,  # 0.0005\n",
    "    \"finetune_lr\": 0.0005,\n",
    "    # classifier fine-tuning parameters\n",
    "    \"batch_size\": 8,\n",
    "    \"num_epochs\": 80,\n",
    "    \"patience\": 30,\n",
    "    \"weight\": 0.6,\n",
    "    \"test_size\": 0.3,\n",
    "    \"val_size\": 0.5,\n",
    "    # model architecture parameters\n",
    "    \"layers_tree\": 7,\n",
    "    \"num_heads\": 4,\n",
    "    \"num_layers\": 2,\n",
    "    \"drop_rate\": 0.5\n",
    "}\n",
    "\n",
    "# Create the run logger (setup_logger is defined in an earlier cell)\n",
    "logger = setup_logger()\n",
    "logger.info('============  BSNE Training  ============')\n",
    "logger.info('Args:\\n' + json.dumps(args, indent=2, ensure_ascii=False))\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "logger.info(device)\n",
    "logger.info('loading data...')\n",
    "prefix = \"../../data/\"\n",
    "\n",
    "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "# Seed numpy and random so splits and sampling are reproducible\n",
    "np.random.seed(args['seed'])\n",
    "rd.seed(args['seed'])\n",
    "\n",
    "# Stratified train/val/test split plus the dataset's precomputed\n",
    "# shortest-distance matrix used for pretraining sampling.\n",
    "if args['dataset'] == 'yelp':\n",
    "    index = list(range(len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                    test_size=args['test_size'], random_state=2,\n",
    "                                                                    shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
    "elif args['dataset'] == 'amazon':\n",
    "    # Nodes 0-3304 are excluded from the splits (presumably unlabeled -- TODO confirm)\n",
    "    index = list(range(3305, len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                    stratify=labels[3305:],\n",
    "                                                                    test_size=args['test_size'],\n",
    "                                                                    random_state=2, shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "else:\n",
    "    # Fail fast: without this branch, dist_path is undefined below (NameError)\n",
    "    raise ValueError(f\"Unknown dataset: {args['dataset']}\")\n",
    "\n",
    "if args['dataset'] == 'amazon':\n",
    "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "\n",
    "with open(dist_path, 'rb') as f:\n",
    "    dist_data = pickle.load(f)\n",
    "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "# Prepare feature data\n",
    "feat_data = torch.tensor(feat_data).float()\n",
    "# Min-max normalize features (fit on the full feature matrix)\n",
    "scaler = MinMaxScaler()\n",
    "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
    "\n",
    "# Initialize the model (BSNE_Transformer is defined in an earlier cell)\n",
    "bsne_model = BSNE_Transformer(\n",
    "    in_feat=feat_data.shape[1],\n",
    "    out_feat=2,\n",
    "    relation_nums=len(edge_indexs),\n",
    "    d_model=64,\n",
    "    nhead=args['num_heads'],\n",
    "    num_layers=args['num_layers'],\n",
    "    dim_feedforward=256,\n",
    "    drop_rate=args['drop_rate']\n",
    ").to(device)\n",
    "\n",
    "# Move the edge indices of every relation to the target device\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "logger.info(\"\\n=== Starting Pretraining ===\")\n",
    "\n",
    "# Freeze the classifier head during pretraining; only encoder params are optimized\n",
    "bsne_model.classifier.requires_grad_(False)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    filter(lambda p: p.requires_grad, bsne_model.parameters()),\n",
    "    lr=args['pretrain_lr'],\n",
    "    weight_decay=args[\"weight_decay\"]\n",
    ")\n",
    "pretrain_best_loss = float('inf')\n",
    "pretrain_no_improve = 0\n",
    "pretrain_early_stop = False\n",
    "\n",
    "temperature = 0.3  # smaller => sharper, more discriminative sampling distribution\n",
    "\n",
    "loss_threshold = args['loss_threshold']\n",
    "sample_size = args['sample_size']\n",
    "max_steps = args['max_steps']\n",
    "max_epochs = args['pretrain_epochs']\n",
    "center_indices = list(range(feat_data.shape[0]))\n",
    "# Shuffle the center-node order before the epochs\n",
    "rd.shuffle(center_indices)\n",
    "\n",
    "# Limit the number of center nodes actually trained on\n",
    "center_indices = center_indices[:max_epochs]\n",
    "for epoch, center_idx in enumerate(center_indices):\n",
    "    logger.info(f\"\\n=== Pretraining Epoch {epoch} (Center Node: {center_idx}) ===\")\n",
    "    step = 0\n",
    "\n",
    "    # Distance-based sampling distribution around this center node\n",
    "    dist_row = dist_matrix[center_idx].cpu().numpy()\n",
    "    probs = np.exp(-dist_row / temperature)\n",
    "\n",
    "    probs[center_idx] = 0\n",
    "    probs = probs / (probs.sum() + 1e-10)\n",
    "\n",
    "    available_nodes = len(dist_row) - 1\n",
    "    actual_sample_size = min(sample_size, available_nodes)\n",
    "    if actual_sample_size <= 0:\n",
    "        # Guard: previously bp_nodes/bu_nodes were left undefined here (NameError\n",
    "        # in the training loop) or silently reused from the previous center node.\n",
    "        logger.info(f\"Center node {center_idx}: no nodes available to sample, skipping\")\n",
    "        continue\n",
    "\n",
    "    # Build the Bp sample (biased toward graph-nearby nodes via probs)\n",
    "    neighbors = np.random.choice(len(dist_row), size=actual_sample_size, p=probs, replace=False)\n",
    "    bp_nodes = neighbors.tolist()\n",
    "\n",
    "    # Build the Bu sample (uniform over all nodes)\n",
    "    neighbors = np.random.choice(len(dist_row), size=actual_sample_size, replace=False)\n",
    "    bu_nodes = neighbors.tolist()\n",
    "\n",
    "    # Optimize this center node until the loss drops below the threshold\n",
    "    # or the step budget (max_steps) is exhausted.\n",
    "    while True:\n",
    "        bsne_model.train()\n",
    "        optimizer.zero_grad()\n",
    "\n",
    "        total_loss = 0.0  # must be reset at every step\n",
    "        eps = 1e-10\n",
    "\n",
    "        # Compute embeddings for every node in the Bp subgraph\n",
    "        bp_node_features = []\n",
    "        for node_idx in bp_nodes:\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "            bp_node_features.append(node_feature.squeeze(0))\n",
    "        bp_features = torch.stack(bp_node_features)\n",
    "\n",
    "        center_node_subgraph = create_node_subgraph(center_idx, feat_data, edge_indexs, device)\n",
    "        _, center_feature = bsne_model([center_node_subgraph])\n",
    "        center_feature = center_feature.squeeze(0)\n",
    "\n",
    "\n",
    "\n",
    "        # Target distribution P: renormalized graph-distance probabilities of the Bp nodes\n",
    "        P = torch.tensor(probs[bp_nodes], device=device, dtype=torch.float32)\n",
    "        P = P/ P.sum()\n",
    "        # print(P)\n",
    "        # p_entropy = -torch.sum(P * torch.log(P)).item()\n",
    "        # print(f\"p_entropy: {p_entropy:.4f}\")\n",
    "\n",
    "        # Compute the Q vector (Euclidean distances in embedding space)\n",
    "        feat_dists_bp = torch.cdist(center_feature.unsqueeze(0), bp_features).squeeze(0)\n",
    "        Q = torch.softmax(-feat_dists_bp, dim=0)\n",
    "        log_ratio = (torch.log(P / Q)) ** 2\n",
    "        # log_ratio = torch.log(P / Q)\n",
    "        loss_local = log_ratio.mean()\n",
    "\n",
    "        # Global loss computation\n",
    "        # Compute embeddings for every node in the Bu subgraph\n",
    "        bu_node_features = []\n",
    "        for node_idx in bu_nodes:\n",
    "            node_subgraph = create_node_subgraph(node_idx, feat_data, edge_indexs, device)\n",
    "            _, node_feature = bsne_model([node_subgraph])\n",
    "            bu_node_features.append(node_feature.squeeze(0))\n",
    "        bu_features = torch.stack(bu_node_features)\n",
    "\n",
    "        feat_dists_bu = torch.cdist(center_feature.unsqueeze(0), bu_features).squeeze(0)\n",
    "        sum_e_bp = torch.exp(-feat_dists_bp).sum()\n",
    "        sum_e_bu = torch.exp(-feat_dists_bu).sum()\n",
    "\n",
    "        # NOTE(review): k_Bp scales the Bp probability mass by N/|Bp| --\n",
    "        # presumably an estimate of the mass the sample represents; confirm vs. paper\n",
    "        N = len(probs)\n",
    "        k_Bp = probs[bp_nodes].sum() * (N / len(bp_nodes))\n",
    "        loss_global = (torch.log(k_Bp*sum_e_bu/sum_e_bp))**2\n",
    "        #         # add the squared term\n",
    "        #         loss_global = (torch.log(global_ratio.clamp(min=eps, max=1e10)))**2\n",
    "        total_loss = loss_local + loss_global\n",
    "        # total_loss = loss_local\n",
    "        total_loss.backward()\n",
    "        optimizer.step()\n",
    "\n",
    "        logger.info(f\"Step {step}: local_loss: {loss_local.item()} | global_loss: {loss_global.item()} | total_loss: {total_loss.item()}\")\n",
    "        step += 1\n",
    "\n",
    "        # Stop on convergence or when the step budget is exhausted\n",
    "        if total_loss.item() < loss_threshold or step >= max_steps:\n",
    "            logger.info(f\"Center node {center_idx} finished at step {step} with loss {total_loss.item():.4f}\")\n",
    "            break\n",
    "\n",
    "\n",
    "logger.info(\"\\n=== Starting Fine-tuning ===\")\n",
    "# Unfreeze the classifier head; the whole model is optimized from here on\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=args[\"weight_decay\"]\n",
    ")\n",
    "# Halve the learning rate when the validation AUC plateaus\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "# pos_neg_split (defined elsewhere) partitions training indices by label\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "    total_loss = 0.0\n",
    "\n",
    "    # Sample a batch of center nodes from the labeled training pools\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = []\n",
    "\n",
    "    for xi in batch_centers:\n",
    "        subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "        subgraph_data.append(subgraph)\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # Labels of the sampled center nodes\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "\n",
    "    # Classification loss; F.nll_loss reduces with the batch mean by default\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    total_loss += cls_loss.item()\n",
    "\n",
    "    # BUGFIX: cls_loss is already a batch mean; the old\n",
    "    # total_loss / args['batch_size'] under-reported the loss by a factor\n",
    "    # of the batch size in the log below.\n",
    "    avg_loss = total_loss\n",
    "\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        logger.info(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f} | Val GMean: {val_g_mean:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        # Checkpoint on validation-AUC improvement; count stale evals otherwise\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            logger.info(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# Load the best checkpoint; fall back to the final weights if validation\n",
    "# never improved (best_model_state would be None and crash load_state_dict)\n",
    "if best_model_state is not None:\n",
    "    bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# Final evaluation on the held-out test split\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "logger.info(f'\\n=== Final Test Results ===')\n",
    "logger.info(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2e209f25",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2025-07-24 19:08:54 | INFO | ============  BSNE Training  ============\n",
      "2025-07-24 19:08:54 | INFO | Args:\n",
      "{\n",
      "  \"dataset\": \"amazon\",\n",
      "  \"weight_decay\": 5e-05,\n",
      "  \"seed\": 76,\n",
      "  \"pretrain_epochs\": 10,\n",
      "  \"max_steps\": 100,\n",
      "  \"sample_size\": 100,\n",
      "  \"loss_threshold\": 0.1,\n",
      "  \"pretrain_lr\": 0.0015,\n",
      "  \"finetune_lr\": 0.0001,\n",
      "  \"batch_size\": 32,\n",
      "  \"num_epochs\": 80,\n",
      "  \"patience\": 30,\n",
      "  \"weight\": 0.6,\n",
      "  \"test_size\": 0.3,\n",
      "  \"val_size\": 0.5,\n",
      "  \"layers_tree\": 7,\n",
      "  \"num_heads\": 4,\n",
      "  \"num_layers\": 2,\n",
      "  \"drop_rate\": 0.5\n",
      "}\n",
      "2025-07-24 19:08:54 | INFO | cuda\n",
      "2025-07-24 19:08:54 | INFO | loading data...\n",
      "2025-07-24 19:09:45 | INFO | \n",
      "=== Starting Fine-tuning ===\n",
      "2025-07-24 19:20:29 | INFO | Epoch: 000 | Loss: 0.0243 | Val AUC: 0.5893 | Val F1: 0.4878 | Val GMean: 0.1177\n",
      "2025-07-24 19:31:41 | INFO | Epoch: 005 | Loss: 0.0116 | Val AUC: 0.7853 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 19:42:54 | INFO | Epoch: 010 | Loss: 0.0100 | Val AUC: 0.8358 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 19:54:07 | INFO | Epoch: 015 | Loss: 0.0081 | Val AUC: 0.8489 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 20:05:19 | INFO | Epoch: 020 | Loss: 0.0045 | Val AUC: 0.8543 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 20:16:32 | INFO | Epoch: 025 | Loss: 0.0149 | Val AUC: 0.8576 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 20:27:44 | INFO | Epoch: 030 | Loss: 0.0086 | Val AUC: 0.8597 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 20:38:56 | INFO | Epoch: 035 | Loss: 0.0061 | Val AUC: 0.8617 | Val F1: 0.4750 | Val GMean: 0.0000\n",
      "2025-07-24 20:50:08 | INFO | Epoch: 040 | Loss: 0.0059 | Val AUC: 0.8622 | Val F1: 0.4750 | Val GMean: 0.0000\n"
     ]
    }
   ],
   "source": [
    "\n",
    "\n",
    "# Hyper-parameter settings (fine-tune-only run: larger batch, smaller LR)\n",
    "args = {\n",
    "    \"dataset\": \"amazon\",\n",
    "    #     \"dataset\": \"yelp\",\n",
    "    \"weight_decay\": 0.00005,\n",
    "    \"seed\": 76,\n",
    "    # pretraining parameters\n",
    "    \"pretrain_epochs\": 10,\n",
    "    \"max_steps\": 100,\n",
    "    \"sample_size\": 100,\n",
    "    \"loss_threshold\": 0.1,\n",
    "    \"pretrain_lr\": 0.0015,  # 0.0005\n",
    "    \"finetune_lr\": 0.0001,\n",
    "    # classifier fine-tuning parameters\n",
    "    \"batch_size\": 32,\n",
    "    \"num_epochs\": 80,\n",
    "    \"patience\": 30,\n",
    "    \"weight\": 0.6,\n",
    "    \"test_size\": 0.3,\n",
    "    \"val_size\": 0.5,\n",
    "    # model architecture parameters\n",
    "    \"layers_tree\": 7,\n",
    "    \"num_heads\": 4,\n",
    "    \"num_layers\": 2,\n",
    "    \"drop_rate\": 0.5\n",
    "}\n",
    "\n",
    "# Create the run logger (setup_logger is defined in an earlier cell)\n",
    "logger = setup_logger()\n",
    "logger.info('============  BSNE Training  ============')\n",
    "logger.info('Args:\\n' + json.dumps(args, indent=2, ensure_ascii=False))\n",
    "\n",
    "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "logger.info(device)\n",
    "logger.info('loading data...')\n",
    "prefix = \"../../data/\"\n",
    "\n",
    "edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "# Seed numpy and random so splits and sampling are reproducible\n",
    "np.random.seed(args['seed'])\n",
    "rd.seed(args['seed'])\n",
    "\n",
    "# Stratified train/val/test split plus the dataset's precomputed\n",
    "# shortest-distance matrix.\n",
    "if args['dataset'] == 'yelp':\n",
    "    index = list(range(len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                    test_size=args['test_size'], random_state=2,\n",
    "                                                                    shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"YelpChi_shortest_distance.pkl\")\n",
    "elif args['dataset'] == 'amazon':\n",
    "    # Nodes 0-3304 are excluded from the splits (presumably unlabeled -- TODO confirm)\n",
    "    index = list(range(3305, len(labels)))\n",
    "    idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                    stratify=labels[3305:],\n",
    "                                                                    test_size=args['test_size'],\n",
    "                                                                    random_state=2, shuffle=True)\n",
    "    idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                          stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                          random_state=2, shuffle=True)\n",
    "    dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "else:\n",
    "    # Fail fast: without this branch, dist_path is undefined below (NameError)\n",
    "    raise ValueError(f\"Unknown dataset: {args['dataset']}\")\n",
    "\n",
    "with open(dist_path, 'rb') as f:\n",
    "    dist_data = pickle.load(f)\n",
    "    dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "# Prepare feature data\n",
    "feat_data = torch.tensor(feat_data).float()\n",
    "# Min-max normalize features (fit on the full feature matrix)\n",
    "scaler = MinMaxScaler()\n",
    "feat_data = torch.tensor(scaler.fit_transform(feat_data)).float().to(device)\n",
    "\n",
    "# Initialize the model (BSNE_Transformer is defined in an earlier cell)\n",
    "bsne_model = BSNE_Transformer(\n",
    "    in_feat=feat_data.shape[1],\n",
    "    out_feat=2,\n",
    "    relation_nums=len(edge_indexs),\n",
    "    d_model=64,\n",
    "    nhead=args['num_heads'],\n",
    "    num_layers=args['num_layers'],\n",
    "    dim_feedforward=256,\n",
    "    drop_rate=args['drop_rate']\n",
    ").to(device)\n",
    "\n",
    "# Move the edge indices of every relation to the target device\n",
    "for edge_index in edge_indexs:\n",
    "    edge_index[0] = edge_index[0].to(device)\n",
    "    edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "\n",
    "\n",
    "logger.info(\"\\n=== Starting Fine-tuning ===\")\n",
    "# No pretraining in this cell: train the randomly initialized model directly\n",
    "bsne_model.classifier.requires_grad_(True)\n",
    "optimizer = torch.optim.AdamW(\n",
    "    bsne_model.parameters(),\n",
    "    lr=args['finetune_lr'],\n",
    "    weight_decay=args[\"weight_decay\"]\n",
    ")\n",
    "# Halve the learning rate when the validation AUC plateaus\n",
    "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "    optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    ")\n",
    "\n",
    "best_val_auc = 0.0\n",
    "best_model_state = None\n",
    "# pos_neg_split (defined elsewhere) partitions training indices by label\n",
    "train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "no_improve_epochs = 0\n",
    "early_stop = False\n",
    "\n",
    "for epoch in range(args['num_epochs']):\n",
    "    if early_stop:\n",
    "        break\n",
    "\n",
    "    bsne_model.train()\n",
    "    total_loss = 0.0\n",
    "\n",
    "    # Sample a batch of center nodes from the labeled training pools\n",
    "    batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "    subgraph_data = []\n",
    "\n",
    "    for xi in batch_centers:\n",
    "        subgraph = create_node_subgraph(xi, feat_data, edge_indexs, device)\n",
    "        subgraph_data.append(subgraph)\n",
    "\n",
    "    optimizer.zero_grad()\n",
    "\n",
    "    center_logits, _ = bsne_model(subgraph_data)  # [B, 2]\n",
    "\n",
    "    # Labels of the sampled center nodes\n",
    "    labels_center = torch.tensor([labels[xi] for xi in batch_centers]).to(device).long()\n",
    "\n",
    "    # Classification loss; F.nll_loss reduces with the batch mean by default\n",
    "    cls_loss = F.nll_loss(center_logits, labels_center)\n",
    "\n",
    "    cls_loss.backward()\n",
    "    optimizer.step()\n",
    "\n",
    "    total_loss += cls_loss.item()\n",
    "\n",
    "    # BUGFIX: cls_loss is already a batch mean; the old\n",
    "    # total_loss / args['batch_size'] under-reported the loss by a factor\n",
    "    # of the batch size in the log below.\n",
    "    avg_loss = total_loss\n",
    "\n",
    "    if epoch % 5 == 0:\n",
    "        val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, bsne_model, feat_data, edge_indexs, device)\n",
    "\n",
    "        logger.info(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f} | Val GMean: {val_g_mean:.4f}')\n",
    "\n",
    "        scheduler.step(val_auc)\n",
    "\n",
    "        # Checkpoint on validation-AUC improvement; count stale evals otherwise\n",
    "        if val_auc > best_val_auc:\n",
    "            best_val_auc = val_auc\n",
    "            no_improve_epochs = 0\n",
    "            best_model_state = copy.deepcopy(bsne_model.state_dict())\n",
    "        else:\n",
    "            no_improve_epochs += 1\n",
    "\n",
    "        if no_improve_epochs >= args['patience']:\n",
    "            logger.info(f\"Early stopping at epoch {epoch}\")\n",
    "            early_stop = True\n",
    "\n",
    "# Load the best checkpoint; fall back to the final weights if validation\n",
    "# never improved (best_model_state would be None and crash load_state_dict)\n",
    "if best_model_state is not None:\n",
    "    bsne_model.load_state_dict(best_model_state)\n",
    "\n",
    "# Final evaluation on the held-out test split\n",
    "test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, bsne_model, feat_data, edge_indexs, device)\n",
    "logger.info(f'\\n=== Final Test Results ===')\n",
    "logger.info(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fcd0ceff",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0328eee6",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fdbc6310",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tsne01",
   "language": "python",
   "name": "tsne01"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
