{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d7bd4dbc-d5ed-4c07-a7c3-23770343a6c3",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Setting Splits Directory... ./splits/./10foldcv_subtype_redo/p75/1/tcga_kidney\n",
      "\n",
      "Load Dataset\n",
      "tcga_kidney_subtype\n",
      "label column: oncotree_code\n",
      "label dictionary: {'CCRCC': 0, 'PRCC': 1, 'CHRCC': 2}\n",
      "number of classes: 3\n",
      "slide-level counts:  \n",
      " label\n",
      "1    289\n",
      "0    498\n",
      "2    118\n",
      "Name: count, dtype: int64\n",
      "Patient-LVL; Number of samples registered in class 0: 492\n",
      "Slide-LVL; Number of samples registered in class 0: 498\n",
      "Patient-LVL; Number of samples registered in class 1: 267\n",
      "Slide-LVL; Number of samples registered in class 1: 289\n",
      "Patient-LVL; Number of samples registered in class 2: 107\n",
      "Slide-LVL; Number of samples registered in class 2: 118\n",
       "Slide data in generic MIL      Unnamed: 0       case_id  \\\n",
      "0             0  TCGA-4A-A93X   \n",
      "1             1  TCGA-B3-4104   \n",
      "2             2  TCGA-BP-4963   \n",
      "3             3  TCGA-BP-5170   \n",
      "4             4  TCGA-BP-5175   \n",
      "..          ...           ...   \n",
      "900         926  TCGA-B0-5092   \n",
      "901         927  TCGA-A3-3326   \n",
      "902         928  TCGA-BP-4161   \n",
      "903         929  TCGA-BP-4338   \n",
      "904         930  TCGA-KL-8325   \n",
      "\n",
      "                                              slide_id oncotree_code site  \\\n",
      "0    TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A   \n",
      "1    TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3   \n",
      "2    TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP   \n",
      "3    TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP   \n",
      "4    TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP   \n",
      "..                                                 ...           ...  ...   \n",
      "900  TCGA-B0-5092-01Z-00-DX1.9b3a2c5c-4614-41bc-973...         CCRCC   B0   \n",
      "901  TCGA-A3-3326-01Z-00-DX1.2caeb5df-3522-4205-80c...         CCRCC   A3   \n",
      "902  TCGA-BP-4161-01Z-00-DX1.a5c24186-a438-4c65-857...         CCRCC   BP   \n",
      "903  TCGA-BP-4338-01Z-00-DX1.fbadb1d7-c11d-4347-9f3...         CCRCC   BP   \n",
      "904  TCGA-KL-8325-01Z-00-DX1.330bdb6c-86ee-4577-984...         CHRCC   KL   \n",
      "\n",
      "      age  survival_months  is_female  censorship race label  \n",
      "0    58.0            12.81        0.0         1.0    W     1  \n",
      "1    75.0            34.46        0.0         1.0    W     1  \n",
      "2    63.0            60.25        0.0         1.0    W     0  \n",
      "3    55.0            79.24        0.0         1.0    W     0  \n",
      "4    60.0            30.62        0.0         1.0    W     0  \n",
      "..    ...              ...        ...         ...  ...   ...  \n",
      "900  53.0            15.08        1.0         0.0    W     0  \n",
      "901  47.0            37.35        0.0         1.0    W     0  \n",
      "902  74.0            90.21        0.0         1.0    W     0  \n",
      "903  43.0            93.92        0.0         1.0    W     0  \n",
      "904  56.0            23.82        1.0         0.0    W     2  \n",
      "\n",
      "[905 rows x 11 columns] ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "split_dir:  ./splits/./10foldcv_subtype_redo/p75/1/tcga_kidney\n",
      "################# Settings ###################\n",
      "num_splits:  10\n",
      "k_start:  0\n",
      "k_end:  -1\n",
      "task:  tcga_kidney_subtype\n",
      "max_epochs:  20\n",
      "results_dir:  ./results_subtyping/p75/kidney/5e-5_beph\n",
      "lr:  5e-05\n",
      "experiment:  tcga_kidney_subtype_clam_sb_vits_tcga_pancancer_dino_1.00\n",
      "reg:  0.01\n",
      "label_frac:  1.0\n",
      "bag_loss:  ce\n",
      "seed:  123\n",
      "model_type:  clam_sb\n",
      "model_size:  small\n",
      "use_drop_out:  True\n",
      "weighted_sample:  True\n",
      "opt:  adam\n",
      "bag_weight:  0.7\n",
      "inst_loss:  svm\n",
      "B:  8\n",
      "split_dir:  ./splits/./10foldcv_subtype_redo/p75/1/tcga_kidney\n",
      "   Unnamed: 0       case_id  \\\n",
      "0           0  TCGA-4A-A93X   \n",
      "1           1  TCGA-B3-4104   \n",
      "2           2  TCGA-BP-4963   \n",
      "3           3  TCGA-BP-5170   \n",
      "4           4  TCGA-BP-5175   \n",
      "\n",
      "                                            slide_id oncotree_code site   age  \\\n",
      "0  TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A  58.0   \n",
      "1  TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3  75.0   \n",
      "2  TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP  63.0   \n",
      "3  TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP  55.0   \n",
      "4  TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP  60.0   \n",
      "\n",
      "   survival_months  is_female  censorship race label  \n",
      "0            12.81        0.0         1.0    W     1  \n",
      "1            34.46        0.0         1.0    W     1  \n",
      "2            60.25        0.0         1.0    W     0  \n",
      "3            79.24        0.0         1.0    W     0  \n",
      "4            30.62        0.0         1.0    W     0  \n",
       "Training Data Size (1.00): 548 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
       "Training Data Size (1.00): 61 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
       "Training Data Size (1.00): 91 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "\n",
      "Training Fold 0!\n",
      "\n",
      "Init train/val/test splits... \n",
      "Done!\n",
      "Training on 548 samples\n",
      "Validating on 61 samples\n",
      "Testing on 91 samples\n",
      "\n",
      "Init loss function... Done!\n",
      "\n",
      "Init Model... Setting tau to 1.0\n",
      "Done!\n",
      "CLAM_SB(\n",
      "  (attention_net): Sequential(\n",
      "    (0): Linear(in_features=384, out_features=384, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Dropout(p=0.25, inplace=False)\n",
      "    (3): Attn_Net_Gated(\n",
      "      (attention_a): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Tanh()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_b): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Sigmoid()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_c): Linear(in_features=256, out_features=1, bias=True)\n",
      "    )\n",
      "  )\n",
      "  (feature_linear1): Linear(in_features=768, out_features=384, bias=True)\n",
      "  (classifiers): Linear(in_features=384, out_features=3, bias=True)\n",
      "  (instance_classifiers): ModuleList(\n",
      "    (0): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (1): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (2): Linear(in_features=384, out_features=2, bias=True)\n",
      "  )\n",
      "  (instance_loss_fn): SmoothTop1SVM()\n",
      ")\n",
      "Total number of parameters: 643978\n",
      "Total number of trainable parameters: 643978\n",
      "\n",
      "Init optimizer ... Done!\n",
      "\n",
      "Init Loaders... Done!\n",
      "\n",
      "Setup EarlyStopping... Done!\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.6807, instance_loss: 0.9329, weighted_loss: 0.7563, label: 2, bag_size: 59810\n",
      "batch 39, loss: 0.6900, instance_loss: 0.7041, weighted_loss: 0.6942, label: 0, bag_size: 58969\n",
      "batch 59, loss: 1.3150, instance_loss: 0.7179, weighted_loss: 1.1359, label: 1, bag_size: 56911\n",
      "batch 79, loss: 0.1933, instance_loss: 0.1923, weighted_loss: 0.1930, label: 0, bag_size: 75202\n",
      "batch 99, loss: 0.7795, instance_loss: 0.5019, weighted_loss: 0.6962, label: 2, bag_size: 87135\n",
      "batch 119, loss: 0.1716, instance_loss: 0.2737, weighted_loss: 0.2023, label: 1, bag_size: 28349\n",
      "batch 139, loss: 0.1160, instance_loss: 0.0882, weighted_loss: 0.1076, label: 1, bag_size: 7532\n",
      "batch 159, loss: 1.8770, instance_loss: 0.0615, weighted_loss: 1.3323, label: 0, bag_size: 90911\n",
      "batch 179, loss: 0.1920, instance_loss: 0.0921, weighted_loss: 0.1620, label: 0, bag_size: 76402\n",
      "batch 199, loss: 0.1185, instance_loss: 0.0257, weighted_loss: 0.0907, label: 0, bag_size: 59101\n",
      "batch 219, loss: 0.8868, instance_loss: 0.3635, weighted_loss: 0.7298, label: 1, bag_size: 8990\n",
      "batch 239, loss: 0.4562, instance_loss: 0.0379, weighted_loss: 0.3307, label: 2, bag_size: 51017\n",
      "batch 259, loss: 1.4163, instance_loss: 0.0274, weighted_loss: 0.9996, label: 0, bag_size: 41723\n",
      "batch 279, loss: 0.1773, instance_loss: 0.0194, weighted_loss: 0.1299, label: 2, bag_size: 50045\n",
      "batch 299, loss: 1.2181, instance_loss: 0.0250, weighted_loss: 0.8602, label: 2, bag_size: 52584\n",
      "batch 319, loss: 0.3040, instance_loss: 0.0183, weighted_loss: 0.2183, label: 2, bag_size: 54040\n",
      "batch 339, loss: 0.0576, instance_loss: 0.0337, weighted_loss: 0.0504, label: 1, bag_size: 4771\n",
      "batch 359, loss: 0.9138, instance_loss: 0.0371, weighted_loss: 0.6508, label: 0, bag_size: 50823\n",
      "batch 379, loss: 2.7082, instance_loss: 0.0768, weighted_loss: 1.9188, label: 1, bag_size: 87674\n",
      "batch 399, loss: 0.1643, instance_loss: 0.0130, weighted_loss: 0.1189, label: 2, bag_size: 55626\n",
      "batch 419, loss: 0.2611, instance_loss: 0.0416, weighted_loss: 0.1952, label: 2, bag_size: 84436\n",
      "batch 439, loss: 0.6555, instance_loss: 0.0280, weighted_loss: 0.4673, label: 1, bag_size: 108492\n",
      "batch 459, loss: 0.0369, instance_loss: 0.0055, weighted_loss: 0.0274, label: 2, bag_size: 79373\n",
      "batch 479, loss: 0.5837, instance_loss: 0.0263, weighted_loss: 0.4165, label: 2, bag_size: 65317\n",
      "batch 499, loss: 0.4471, instance_loss: 0.0196, weighted_loss: 0.3189, label: 2, bag_size: 59810\n",
      "batch 519, loss: 1.0632, instance_loss: 0.0075, weighted_loss: 0.7465, label: 0, bag_size: 99876\n",
      "batch 539, loss: 1.2670, instance_loss: 0.0068, weighted_loss: 0.8890, label: 2, bag_size: 65340\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9614507299270073: correct 4215/4384\n",
      "class 1 clustering acc 0.9142335766423357: correct 4008/4384\n",
      "Epoch: 0, train_loss: 0.6498, train_clustering_loss:  0.2582, train_error: 0.2573\n",
      "class 0: acc 0.7379679144385026, correct 138/187\n",
      "class 1: acc 0.6785714285714286, correct 114/168\n",
      "class 2: acc 0.8031088082901554, correct 155/193\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5719, val_error: 0.1967, auc: 0.9261\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8378378378378378, correct 31/37\n",
      "class 1: acc 0.5714285714285714, correct 8/14\n",
      "class 2: acc 1.0, correct 10/10\n",
      "Validation loss decreased (inf --> 0.926091).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.3456, instance_loss: 0.0409, weighted_loss: 0.2542, label: 2, bag_size: 13963\n",
      "batch 39, loss: 1.1039, instance_loss: 0.0048, weighted_loss: 0.7742, label: 1, bag_size: 61193\n",
      "batch 59, loss: 2.3403, instance_loss: 0.6068, weighted_loss: 1.8202, label: 0, bag_size: 30355\n",
      "batch 79, loss: 0.0481, instance_loss: 0.0080, weighted_loss: 0.0361, label: 1, bag_size: 42941\n",
      "batch 99, loss: 0.0090, instance_loss: 0.0091, weighted_loss: 0.0090, label: 0, bag_size: 9374\n",
      "batch 119, loss: 0.0556, instance_loss: 0.0243, weighted_loss: 0.0462, label: 2, bag_size: 73189\n",
      "batch 139, loss: 0.1442, instance_loss: 0.0116, weighted_loss: 0.1044, label: 0, bag_size: 93592\n",
      "batch 159, loss: 0.0104, instance_loss: 0.0250, weighted_loss: 0.0148, label: 0, bag_size: 65407\n",
      "batch 179, loss: 1.0747, instance_loss: 0.0029, weighted_loss: 0.7532, label: 2, bag_size: 91267\n",
      "batch 199, loss: 1.9514, instance_loss: 0.0046, weighted_loss: 1.3674, label: 0, bag_size: 91903\n",
      "batch 219, loss: 0.0148, instance_loss: 0.0343, weighted_loss: 0.0207, label: 0, bag_size: 92719\n",
      "batch 239, loss: 0.2191, instance_loss: 0.0031, weighted_loss: 0.1543, label: 2, bag_size: 87135\n",
      "batch 259, loss: 0.2242, instance_loss: 0.0066, weighted_loss: 0.1589, label: 0, bag_size: 61984\n",
      "batch 279, loss: 0.1153, instance_loss: 0.3888, weighted_loss: 0.1973, label: 1, bag_size: 47660\n",
      "batch 299, loss: 0.0761, instance_loss: 0.0088, weighted_loss: 0.0559, label: 1, bag_size: 33537\n",
      "batch 319, loss: 0.6554, instance_loss: 0.0332, weighted_loss: 0.4687, label: 1, bag_size: 5928\n",
      "batch 339, loss: 0.2160, instance_loss: 0.0059, weighted_loss: 0.1530, label: 0, bag_size: 44730\n",
      "batch 359, loss: 0.0199, instance_loss: 0.0276, weighted_loss: 0.0222, label: 0, bag_size: 84361\n",
      "batch 379, loss: 0.0261, instance_loss: 0.0020, weighted_loss: 0.0189, label: 0, bag_size: 43598\n",
      "batch 399, loss: 0.1472, instance_loss: 0.0128, weighted_loss: 0.1069, label: 1, bag_size: 56549\n",
      "batch 419, loss: 0.1809, instance_loss: 0.0008, weighted_loss: 0.1269, label: 0, bag_size: 36141\n",
      "batch 439, loss: 0.0054, instance_loss: 0.0006, weighted_loss: 0.0040, label: 0, bag_size: 75202\n",
      "batch 459, loss: 0.4024, instance_loss: 0.0025, weighted_loss: 0.2824, label: 2, bag_size: 103823\n",
      "batch 479, loss: 0.6427, instance_loss: 0.0037, weighted_loss: 0.4510, label: 2, bag_size: 58111\n",
      "batch 499, loss: 2.3600, instance_loss: 0.0028, weighted_loss: 1.6529, label: 0, bag_size: 74863\n",
      "batch 519, loss: 1.1952, instance_loss: 0.0032, weighted_loss: 0.8376, label: 1, bag_size: 22039\n",
      "batch 539, loss: 2.7457, instance_loss: 0.0209, weighted_loss: 1.9283, label: 2, bag_size: 52584\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9974908759124088: correct 4373/4384\n",
      "class 1 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "Epoch: 1, train_loss: 0.3886, train_clustering_loss:  0.0245, train_error: 0.1241\n",
      "class 0: acc 0.8397790055248618, correct 152/181\n",
      "class 1: acc 0.8429319371727748, correct 161/191\n",
      "class 2: acc 0.9488636363636364, correct 167/176\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5536, val_error: 0.1967, auc: 0.9408\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.7837837837837838, correct 29/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
       "Validation AUC increased (0.926091 --> 0.940754).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0470, instance_loss: 0.0047, weighted_loss: 0.0344, label: 2, bag_size: 69002\n",
      "batch 39, loss: 0.0340, instance_loss: 0.0926, weighted_loss: 0.0515, label: 1, bag_size: 2328\n",
      "batch 59, loss: 0.0195, instance_loss: 0.0013, weighted_loss: 0.0141, label: 0, bag_size: 79972\n",
      "batch 79, loss: 2.5049, instance_loss: 0.0297, weighted_loss: 1.7623, label: 1, bag_size: 61193\n",
      "batch 99, loss: 0.2834, instance_loss: 0.0119, weighted_loss: 0.2020, label: 2, bag_size: 59810\n",
      "batch 119, loss: 0.0845, instance_loss: 0.0000, weighted_loss: 0.0591, label: 1, bag_size: 39770\n",
      "batch 139, loss: 0.1203, instance_loss: 0.0037, weighted_loss: 0.0853, label: 1, bag_size: 81168\n",
      "batch 159, loss: 0.0464, instance_loss: 0.0521, weighted_loss: 0.0481, label: 0, bag_size: 59830\n",
      "batch 179, loss: 0.0671, instance_loss: 0.0021, weighted_loss: 0.0476, label: 2, bag_size: 65340\n",
      "batch 199, loss: 0.0704, instance_loss: 0.0027, weighted_loss: 0.0501, label: 2, bag_size: 54040\n",
      "batch 219, loss: 0.1456, instance_loss: 0.0223, weighted_loss: 0.1086, label: 0, bag_size: 50823\n",
      "batch 239, loss: 0.2508, instance_loss: 0.0012, weighted_loss: 0.1759, label: 0, bag_size: 53043\n",
      "batch 259, loss: 0.3775, instance_loss: 0.0061, weighted_loss: 0.2661, label: 1, bag_size: 86619\n",
      "batch 279, loss: 0.0311, instance_loss: 0.0029, weighted_loss: 0.0226, label: 0, bag_size: 48547\n",
      "batch 299, loss: 0.0042, instance_loss: 0.0002, weighted_loss: 0.0030, label: 0, bag_size: 35639\n",
      "batch 319, loss: 2.7440, instance_loss: 0.0231, weighted_loss: 1.9277, label: 0, bag_size: 59783\n",
      "batch 339, loss: 0.0928, instance_loss: 0.0282, weighted_loss: 0.0734, label: 2, bag_size: 79690\n",
      "batch 359, loss: 0.0361, instance_loss: 0.0011, weighted_loss: 0.0256, label: 0, bag_size: 86612\n",
      "batch 379, loss: 0.1030, instance_loss: 0.0009, weighted_loss: 0.0723, label: 0, bag_size: 82846\n",
      "batch 399, loss: 1.0080, instance_loss: 0.0016, weighted_loss: 0.7061, label: 1, bag_size: 114116\n",
      "batch 419, loss: 0.1073, instance_loss: 0.0518, weighted_loss: 0.0906, label: 2, bag_size: 40315\n",
      "batch 439, loss: 0.0374, instance_loss: 0.0057, weighted_loss: 0.0279, label: 0, bag_size: 20672\n",
      "batch 459, loss: 0.1579, instance_loss: 0.0148, weighted_loss: 0.1150, label: 1, bag_size: 50909\n",
      "batch 479, loss: 0.0377, instance_loss: 0.0015, weighted_loss: 0.0268, label: 1, bag_size: 79045\n",
      "batch 499, loss: 0.0123, instance_loss: 0.0133, weighted_loss: 0.0126, label: 2, bag_size: 66345\n",
      "batch 519, loss: 0.3760, instance_loss: 0.0056, weighted_loss: 0.2649, label: 2, bag_size: 91267\n",
      "batch 539, loss: 0.1246, instance_loss: 0.0009, weighted_loss: 0.0875, label: 0, bag_size: 53214\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9986313868613139: correct 4378/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 2, train_loss: 0.2754, train_clustering_loss:  0.0131, train_error: 0.0949\n",
      "class 0: acc 0.8994413407821229, correct 161/179\n",
      "class 1: acc 0.882051282051282, correct 172/195\n",
      "class 2: acc 0.9367816091954023, correct 163/174\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5179, val_error: 0.1967, auc: 0.9512\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8108108108108109, correct 30/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.8, correct 8/10\n",
      "Validation loss decreased (0.940754 --> 0.951172).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.2583, instance_loss: 0.0000, weighted_loss: 0.8808, label: 1, bag_size: 45064\n",
      "batch 39, loss: 0.0469, instance_loss: 0.0218, weighted_loss: 0.0394, label: 2, bag_size: 76037\n",
      "batch 59, loss: 0.0028, instance_loss: 0.0033, weighted_loss: 0.0029, label: 1, bag_size: 57077\n",
      "batch 79, loss: 1.7552, instance_loss: 0.0215, weighted_loss: 1.2351, label: 0, bag_size: 6646\n",
      "batch 99, loss: 0.0016, instance_loss: 0.0040, weighted_loss: 0.0023, label: 1, bag_size: 100810\n",
      "batch 119, loss: 1.3005, instance_loss: 0.0089, weighted_loss: 0.9130, label: 1, bag_size: 51926\n",
      "batch 139, loss: 0.0031, instance_loss: 0.0326, weighted_loss: 0.0120, label: 1, bag_size: 27124\n",
      "batch 159, loss: 0.1324, instance_loss: 0.0510, weighted_loss: 0.1080, label: 1, bag_size: 119731\n",
      "batch 179, loss: 0.0135, instance_loss: 0.0909, weighted_loss: 0.0367, label: 1, bag_size: 44591\n",
      "batch 199, loss: 0.2926, instance_loss: 0.0034, weighted_loss: 0.2058, label: 2, bag_size: 51554\n",
      "batch 219, loss: 0.0053, instance_loss: 0.0194, weighted_loss: 0.0095, label: 2, bag_size: 75833\n",
      "batch 239, loss: 0.1648, instance_loss: 0.0003, weighted_loss: 0.1155, label: 0, bag_size: 70854\n",
      "batch 259, loss: 0.0422, instance_loss: 0.0038, weighted_loss: 0.0307, label: 2, bag_size: 54040\n",
      "batch 279, loss: 0.0465, instance_loss: 0.0129, weighted_loss: 0.0364, label: 1, bag_size: 60316\n",
      "batch 299, loss: 0.0004, instance_loss: 0.0002, weighted_loss: 0.0003, label: 0, bag_size: 85829\n",
      "batch 319, loss: 0.0207, instance_loss: 0.0077, weighted_loss: 0.0168, label: 0, bag_size: 65592\n",
      "batch 339, loss: 0.0015, instance_loss: 0.0259, weighted_loss: 0.0088, label: 1, bag_size: 11509\n",
      "batch 359, loss: 0.2280, instance_loss: 0.0635, weighted_loss: 0.1787, label: 1, bag_size: 5002\n",
      "batch 379, loss: 0.0006, instance_loss: 0.0003, weighted_loss: 0.0005, label: 0, bag_size: 45846\n",
      "batch 399, loss: 0.0992, instance_loss: 0.0043, weighted_loss: 0.0707, label: 2, bag_size: 11415\n",
      "batch 419, loss: 0.0279, instance_loss: 0.0007, weighted_loss: 0.0198, label: 0, bag_size: 43867\n",
      "batch 439, loss: 0.0018, instance_loss: 0.0021, weighted_loss: 0.0019, label: 0, bag_size: 67411\n",
      "batch 459, loss: 0.0136, instance_loss: 0.0002, weighted_loss: 0.0096, label: 0, bag_size: 57545\n",
      "batch 479, loss: 0.0388, instance_loss: 0.0253, weighted_loss: 0.0347, label: 1, bag_size: 74870\n",
      "batch 499, loss: 0.0198, instance_loss: 0.0003, weighted_loss: 0.0139, label: 2, bag_size: 72686\n",
      "batch 519, loss: 0.0756, instance_loss: 0.0032, weighted_loss: 0.0539, label: 2, bag_size: 63921\n",
      "batch 539, loss: 0.2296, instance_loss: 0.0109, weighted_loss: 0.1640, label: 2, bag_size: 15486\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9974908759124088: correct 4373/4384\n",
      "class 1 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "Epoch: 3, train_loss: 0.2675, train_clustering_loss:  0.0186, train_error: 0.0967\n",
      "class 0: acc 0.8629441624365483, correct 170/197\n",
      "class 1: acc 0.8764705882352941, correct 149/170\n",
      "class 2: acc 0.9723756906077348, correct 176/181\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5644, val_error: 0.1967, auc: 0.9438\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8378378378378378, correct 31/37\n",
      "class 1: acc 0.7142857142857143, correct 10/14\n",
      "class 2: acc 0.8, correct 8/10\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1004, instance_loss: 0.0015, weighted_loss: 0.0707, label: 2, bag_size: 50978\n",
      "batch 39, loss: 0.0067, instance_loss: 0.0060, weighted_loss: 0.0065, label: 0, bag_size: 72844\n",
      "batch 59, loss: 0.0000, instance_loss: 0.0008, weighted_loss: 0.0002, label: 0, bag_size: 74596\n",
      "batch 79, loss: 0.3281, instance_loss: 0.0002, weighted_loss: 0.2297, label: 2, bag_size: 49458\n",
      "batch 99, loss: 1.0214, instance_loss: 0.0002, weighted_loss: 0.7150, label: 0, bag_size: 90911\n",
      "batch 119, loss: 0.0178, instance_loss: 0.0015, weighted_loss: 0.0129, label: 2, bag_size: 71567\n",
      "batch 139, loss: 0.0035, instance_loss: 0.0042, weighted_loss: 0.0037, label: 1, bag_size: 15902\n",
      "batch 159, loss: 0.2246, instance_loss: 0.0113, weighted_loss: 0.1606, label: 0, bag_size: 68286\n",
      "batch 179, loss: 0.4971, instance_loss: 0.0459, weighted_loss: 0.3617, label: 2, bag_size: 51554\n",
      "batch 199, loss: 0.5135, instance_loss: 0.0048, weighted_loss: 0.3609, label: 1, bag_size: 9486\n",
      "batch 219, loss: 0.0417, instance_loss: 0.0020, weighted_loss: 0.0298, label: 0, bag_size: 22840\n",
      "batch 239, loss: 0.1859, instance_loss: 0.0055, weighted_loss: 0.1318, label: 2, bag_size: 51554\n",
      "batch 259, loss: 0.0031, instance_loss: 0.0011, weighted_loss: 0.0025, label: 0, bag_size: 28964\n",
      "batch 279, loss: 0.6889, instance_loss: 0.0072, weighted_loss: 0.4844, label: 1, bag_size: 102214\n",
      "batch 299, loss: 0.1276, instance_loss: 0.0141, weighted_loss: 0.0935, label: 1, bag_size: 22039\n",
      "batch 319, loss: 0.0006, instance_loss: 0.0060, weighted_loss: 0.0023, label: 1, bag_size: 7199\n",
      "batch 339, loss: 0.0823, instance_loss: 0.0039, weighted_loss: 0.0588, label: 1, bag_size: 79792\n",
      "batch 359, loss: 0.4507, instance_loss: 0.0082, weighted_loss: 0.3180, label: 1, bag_size: 5928\n",
      "batch 379, loss: 0.0028, instance_loss: 0.0007, weighted_loss: 0.0021, label: 0, bag_size: 45490\n",
      "batch 399, loss: 0.4048, instance_loss: 0.0295, weighted_loss: 0.2922, label: 0, bag_size: 69649\n",
      "batch 419, loss: 0.0003, instance_loss: 0.0003, weighted_loss: 0.0003, label: 0, bag_size: 67238\n",
      "batch 439, loss: 0.0768, instance_loss: 0.0009, weighted_loss: 0.0540, label: 0, bag_size: 108807\n",
      "batch 459, loss: 0.0281, instance_loss: 0.0039, weighted_loss: 0.0209, label: 2, bag_size: 34408\n",
      "batch 479, loss: 0.0333, instance_loss: 0.0019, weighted_loss: 0.0239, label: 1, bag_size: 61135\n",
      "batch 499, loss: 0.0001, instance_loss: 0.0012, weighted_loss: 0.0004, label: 0, bag_size: 74596\n",
      "batch 519, loss: 0.1085, instance_loss: 0.0183, weighted_loss: 0.0814, label: 0, bag_size: 13539\n",
      "batch 539, loss: 0.0446, instance_loss: 0.0042, weighted_loss: 0.0325, label: 2, bag_size: 50978\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999771897810219: correct 4383/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 4, train_loss: 0.2244, train_clustering_loss:  0.0107, train_error: 0.0748\n",
      "class 0: acc 0.9215686274509803, correct 188/204\n",
      "class 1: acc 0.8875739644970414, correct 150/169\n",
      "class 2: acc 0.9657142857142857, correct 169/175\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6539, val_error: 0.2787, auc: 0.9465\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.6486486486486487, correct 24/37\n",
      "class 1: acc 0.8571428571428571, correct 12/14\n",
      "class 2: acc 0.8, correct 8/10\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0118, instance_loss: 0.0009, weighted_loss: 0.0085, label: 1, bag_size: 42941\n",
      "batch 39, loss: 0.0057, instance_loss: 0.0030, weighted_loss: 0.0049, label: 0, bag_size: 53706\n",
      "batch 59, loss: 0.2280, instance_loss: 0.0012, weighted_loss: 0.1600, label: 1, bag_size: 102214\n",
      "batch 79, loss: 0.0020, instance_loss: 0.0269, weighted_loss: 0.0095, label: 2, bag_size: 66345\n",
      "batch 99, loss: 0.0306, instance_loss: 0.0006, weighted_loss: 0.0216, label: 0, bag_size: 72078\n",
      "batch 119, loss: 0.0021, instance_loss: 0.0000, weighted_loss: 0.0015, label: 1, bag_size: 39733\n",
      "batch 139, loss: 0.0166, instance_loss: 0.0019, weighted_loss: 0.0122, label: 1, bag_size: 25323\n",
      "batch 159, loss: 0.0025, instance_loss: 0.0100, weighted_loss: 0.0047, label: 0, bag_size: 93049\n",
      "batch 179, loss: 0.0190, instance_loss: 0.0116, weighted_loss: 0.0168, label: 0, bag_size: 66702\n",
      "batch 199, loss: 0.0138, instance_loss: 0.0059, weighted_loss: 0.0115, label: 0, bag_size: 16036\n",
      "batch 219, loss: 0.0024, instance_loss: 0.0063, weighted_loss: 0.0036, label: 2, bag_size: 66345\n",
      "batch 239, loss: 0.0037, instance_loss: 0.0488, weighted_loss: 0.0172, label: 1, bag_size: 2328\n",
      "batch 259, loss: 0.2202, instance_loss: 0.0019, weighted_loss: 0.1547, label: 2, bag_size: 51017\n",
      "batch 279, loss: 0.5647, instance_loss: 0.0152, weighted_loss: 0.3999, label: 2, bag_size: 78955\n",
      "batch 299, loss: 0.0096, instance_loss: 0.0053, weighted_loss: 0.0083, label: 1, bag_size: 76028\n",
      "batch 319, loss: 1.4993, instance_loss: 0.0077, weighted_loss: 1.0518, label: 1, bag_size: 30632\n",
      "batch 339, loss: 0.0074, instance_loss: 0.0000, weighted_loss: 0.0052, label: 0, bag_size: 61317\n",
      "batch 359, loss: 0.0012, instance_loss: 0.0005, weighted_loss: 0.0010, label: 0, bag_size: 59200\n",
      "batch 379, loss: 0.0122, instance_loss: 0.0005, weighted_loss: 0.0087, label: 0, bag_size: 44730\n",
      "batch 399, loss: 0.0115, instance_loss: 0.0134, weighted_loss: 0.0121, label: 1, bag_size: 28349\n",
      "batch 419, loss: 0.0060, instance_loss: 0.0032, weighted_loss: 0.0052, label: 2, bag_size: 63921\n",
      "batch 439, loss: 0.9988, instance_loss: 0.0038, weighted_loss: 0.7003, label: 2, bag_size: 78955\n",
      "batch 459, loss: 0.0050, instance_loss: 0.0000, weighted_loss: 0.0035, label: 1, bag_size: 54288\n",
      "batch 479, loss: 0.0004, instance_loss: 0.0378, weighted_loss: 0.0116, label: 2, bag_size: 66345\n",
      "batch 499, loss: 0.0059, instance_loss: 0.0323, weighted_loss: 0.0138, label: 2, bag_size: 51316\n",
      "batch 519, loss: 0.4469, instance_loss: 0.0203, weighted_loss: 0.3189, label: 2, bag_size: 11415\n",
      "batch 539, loss: 0.0187, instance_loss: 0.0031, weighted_loss: 0.0140, label: 2, bag_size: 54040\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 5, train_loss: 0.2091, train_clustering_loss:  0.0116, train_error: 0.0766\n",
      "class 0: acc 0.9096045197740112, correct 161/177\n",
      "class 1: acc 0.8901098901098901, correct 162/182\n",
      "class 2: acc 0.9682539682539683, correct 183/189\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3637, val_error: 0.1311, auc: 0.9673\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "Validation loss decreased (0.951172 --> 0.967281).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.2214, instance_loss: 0.0118, weighted_loss: 0.8585, label: 1, bag_size: 49727\n",
      "batch 39, loss: 0.0078, instance_loss: 0.0060, weighted_loss: 0.0073, label: 1, bag_size: 60316\n",
      "batch 59, loss: 0.1680, instance_loss: 0.0006, weighted_loss: 0.1178, label: 0, bag_size: 67742\n",
      "batch 79, loss: 0.0251, instance_loss: 0.0033, weighted_loss: 0.0185, label: 2, bag_size: 72966\n",
      "batch 99, loss: 0.0038, instance_loss: 0.0018, weighted_loss: 0.0032, label: 1, bag_size: 47400\n",
      "batch 119, loss: 0.0005, instance_loss: 0.0052, weighted_loss: 0.0019, label: 2, bag_size: 68187\n",
      "batch 139, loss: 0.0398, instance_loss: 0.0268, weighted_loss: 0.0359, label: 2, bag_size: 46661\n",
      "batch 159, loss: 0.0369, instance_loss: 0.0147, weighted_loss: 0.0303, label: 2, bag_size: 13963\n",
      "batch 179, loss: 0.0163, instance_loss: 0.0205, weighted_loss: 0.0175, label: 1, bag_size: 4094\n",
      "batch 199, loss: 0.0058, instance_loss: 0.0106, weighted_loss: 0.0072, label: 1, bag_size: 13709\n",
      "batch 219, loss: 0.0011, instance_loss: 0.0035, weighted_loss: 0.0018, label: 2, bag_size: 36978\n",
      "batch 239, loss: 0.0024, instance_loss: 0.0000, weighted_loss: 0.0017, label: 1, bag_size: 53245\n",
      "batch 259, loss: 0.0050, instance_loss: 0.0481, weighted_loss: 0.0179, label: 2, bag_size: 75833\n",
      "batch 279, loss: 0.1686, instance_loss: 0.0006, weighted_loss: 0.1182, label: 0, bag_size: 77628\n",
      "batch 299, loss: 0.0183, instance_loss: 0.0118, weighted_loss: 0.0163, label: 2, bag_size: 76037\n",
      "batch 319, loss: 0.0003, instance_loss: 0.0000, weighted_loss: 0.0002, label: 0, bag_size: 64555\n",
      "batch 339, loss: 0.0002, instance_loss: 0.0000, weighted_loss: 0.0001, label: 0, bag_size: 58969\n",
      "batch 359, loss: 0.3368, instance_loss: 0.0004, weighted_loss: 0.2359, label: 0, bag_size: 38129\n",
      "batch 379, loss: 0.0382, instance_loss: 0.0011, weighted_loss: 0.0270, label: 2, bag_size: 73189\n",
      "batch 399, loss: 0.0017, instance_loss: 0.0206, weighted_loss: 0.0074, label: 2, bag_size: 48593\n",
      "batch 419, loss: 0.0603, instance_loss: 0.0023, weighted_loss: 0.0429, label: 2, bag_size: 50246\n",
      "batch 439, loss: 0.0005, instance_loss: 0.0013, weighted_loss: 0.0008, label: 0, bag_size: 99736\n",
      "batch 459, loss: 0.1339, instance_loss: 0.0020, weighted_loss: 0.0943, label: 1, bag_size: 50909\n",
      "batch 479, loss: 0.0033, instance_loss: 0.0000, weighted_loss: 0.0023, label: 2, bag_size: 38471\n",
      "batch 499, loss: 0.0239, instance_loss: 0.0005, weighted_loss: 0.0169, label: 1, bag_size: 13487\n",
      "batch 519, loss: 0.0010, instance_loss: 0.0052, weighted_loss: 0.0023, label: 2, bag_size: 51251\n",
      "batch 539, loss: 0.0037, instance_loss: 0.0009, weighted_loss: 0.0029, label: 2, bag_size: 49458\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 6, train_loss: 0.1718, train_clustering_loss:  0.0083, train_error: 0.0657\n",
      "class 0: acc 0.9371428571428572, correct 164/175\n",
      "class 1: acc 0.9047619047619048, correct 171/189\n",
      "class 2: acc 0.9619565217391305, correct 177/184\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4701, val_error: 0.1311, auc: 0.9653\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0197, instance_loss: 0.0012, weighted_loss: 0.0142, label: 0, bag_size: 38922\n",
      "batch 39, loss: 0.0162, instance_loss: 0.0135, weighted_loss: 0.0154, label: 1, bag_size: 115202\n",
      "batch 59, loss: 0.0075, instance_loss: 0.0031, weighted_loss: 0.0062, label: 0, bag_size: 51603\n",
      "batch 79, loss: 0.0004, instance_loss: 0.0014, weighted_loss: 0.0007, label: 0, bag_size: 58396\n",
      "batch 99, loss: 0.0335, instance_loss: 0.0089, weighted_loss: 0.0261, label: 0, bag_size: 60909\n",
      "batch 119, loss: 0.0998, instance_loss: 0.0026, weighted_loss: 0.0706, label: 0, bag_size: 63775\n",
      "batch 139, loss: 2.6488, instance_loss: 0.0017, weighted_loss: 1.8547, label: 1, bag_size: 49727\n",
      "batch 159, loss: 1.8231, instance_loss: 0.0011, weighted_loss: 1.2765, label: 0, bag_size: 53401\n",
      "batch 179, loss: 0.0159, instance_loss: 0.0024, weighted_loss: 0.0119, label: 0, bag_size: 84811\n",
      "batch 199, loss: 0.0127, instance_loss: 0.0243, weighted_loss: 0.0162, label: 2, bag_size: 13963\n",
      "batch 219, loss: 0.0007, instance_loss: 0.0016, weighted_loss: 0.0010, label: 1, bag_size: 45270\n",
      "batch 239, loss: 0.0069, instance_loss: 0.0016, weighted_loss: 0.0053, label: 2, bag_size: 66023\n",
      "batch 259, loss: 0.1313, instance_loss: 0.0098, weighted_loss: 0.0949, label: 1, bag_size: 5928\n",
      "batch 279, loss: 0.0114, instance_loss: 0.0055, weighted_loss: 0.0096, label: 1, bag_size: 74826\n",
      "batch 299, loss: 0.5777, instance_loss: 0.0015, weighted_loss: 0.4048, label: 1, bag_size: 112934\n",
      "batch 319, loss: 0.0748, instance_loss: 0.0056, weighted_loss: 0.0540, label: 0, bag_size: 14325\n",
      "batch 339, loss: 0.0313, instance_loss: 0.0057, weighted_loss: 0.0236, label: 1, bag_size: 79573\n",
      "batch 359, loss: 0.1436, instance_loss: 0.0000, weighted_loss: 0.1006, label: 1, bag_size: 102214\n",
      "batch 379, loss: 0.0341, instance_loss: 0.0110, weighted_loss: 0.0271, label: 0, bag_size: 83638\n",
      "batch 399, loss: 0.0016, instance_loss: 0.0018, weighted_loss: 0.0017, label: 2, bag_size: 51251\n",
      "batch 419, loss: 0.0177, instance_loss: 0.0088, weighted_loss: 0.0151, label: 1, bag_size: 10520\n",
      "batch 439, loss: 0.0145, instance_loss: 0.0006, weighted_loss: 0.0103, label: 2, bag_size: 50978\n",
      "batch 459, loss: 0.5537, instance_loss: 0.0000, weighted_loss: 0.3876, label: 1, bag_size: 49548\n",
      "batch 479, loss: 0.0039, instance_loss: 0.0004, weighted_loss: 0.0029, label: 0, bag_size: 39692\n",
      "batch 499, loss: 0.0003, instance_loss: 0.0025, weighted_loss: 0.0010, label: 2, bag_size: 68187\n",
      "batch 519, loss: 0.0066, instance_loss: 0.0046, weighted_loss: 0.0060, label: 2, bag_size: 36978\n",
      "batch 539, loss: 0.3779, instance_loss: 0.0026, weighted_loss: 0.2653, label: 2, bag_size: 78955\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9988594890510949: correct 4379/4384\n",
      "Epoch: 7, train_loss: 0.1842, train_clustering_loss:  0.0079, train_error: 0.0639\n",
      "class 0: acc 0.8944444444444445, correct 161/180\n",
      "class 1: acc 0.9405405405405406, correct 174/185\n",
      "class 2: acc 0.9726775956284153, correct 178/183\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3495, val_error: 0.1311, auc: 0.9732\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "Validation loss decreased (0.967281 --> 0.973247).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0034, instance_loss: 0.0000, weighted_loss: 0.0024, label: 0, bag_size: 46695\n",
      "batch 39, loss: 0.0007, instance_loss: 0.0088, weighted_loss: 0.0031, label: 1, bag_size: 44760\n",
      "batch 59, loss: 0.0005, instance_loss: 0.0025, weighted_loss: 0.0011, label: 0, bag_size: 75790\n",
      "batch 79, loss: 0.1398, instance_loss: 0.0002, weighted_loss: 0.0979, label: 2, bag_size: 51554\n",
      "batch 99, loss: 0.0040, instance_loss: 0.0062, weighted_loss: 0.0046, label: 2, bag_size: 66345\n",
      "batch 119, loss: 0.0004, instance_loss: 0.0035, weighted_loss: 0.0013, label: 0, bag_size: 73829\n",
      "batch 139, loss: 0.0101, instance_loss: 0.0023, weighted_loss: 0.0077, label: 2, bag_size: 51017\n",
      "batch 159, loss: 0.0984, instance_loss: 0.0003, weighted_loss: 0.0690, label: 0, bag_size: 36141\n",
      "batch 179, loss: 0.0011, instance_loss: 0.0015, weighted_loss: 0.0012, label: 0, bag_size: 58969\n",
      "batch 199, loss: 0.5856, instance_loss: 0.0000, weighted_loss: 0.4099, label: 0, bag_size: 74863\n",
      "batch 219, loss: 1.1860, instance_loss: 0.0152, weighted_loss: 0.8347, label: 0, bag_size: 7318\n",
      "batch 239, loss: 0.0002, instance_loss: 0.0000, weighted_loss: 0.0001, label: 1, bag_size: 74454\n",
      "batch 259, loss: 0.0007, instance_loss: 0.0084, weighted_loss: 0.0030, label: 2, bag_size: 40315\n",
      "batch 279, loss: 2.1294, instance_loss: 0.0035, weighted_loss: 1.4916, label: 1, bag_size: 49139\n",
      "batch 299, loss: 0.0017, instance_loss: 0.0046, weighted_loss: 0.0026, label: 0, bag_size: 66702\n",
      "batch 319, loss: 0.1188, instance_loss: 0.0027, weighted_loss: 0.0840, label: 2, bag_size: 72686\n",
      "batch 339, loss: 0.7344, instance_loss: 0.0000, weighted_loss: 0.5141, label: 1, bag_size: 49548\n",
      "batch 359, loss: 0.0250, instance_loss: 0.0004, weighted_loss: 0.0176, label: 0, bag_size: 77113\n",
      "batch 379, loss: 0.0086, instance_loss: 0.0002, weighted_loss: 0.0061, label: 0, bag_size: 29980\n",
      "batch 399, loss: 0.0011, instance_loss: 0.0019, weighted_loss: 0.0014, label: 0, bag_size: 67411\n",
      "batch 419, loss: 0.0003, instance_loss: 0.0040, weighted_loss: 0.0014, label: 2, bag_size: 72966\n",
      "batch 439, loss: 0.0054, instance_loss: 0.0141, weighted_loss: 0.0080, label: 1, bag_size: 44760\n",
      "batch 459, loss: 0.0005, instance_loss: 0.0048, weighted_loss: 0.0018, label: 0, bag_size: 20910\n",
      "batch 479, loss: 0.0229, instance_loss: 0.0070, weighted_loss: 0.0181, label: 2, bag_size: 64488\n",
      "batch 499, loss: 0.0011, instance_loss: 0.0010, weighted_loss: 0.0010, label: 0, bag_size: 19019\n",
      "batch 519, loss: 0.0194, instance_loss: 0.0004, weighted_loss: 0.0137, label: 0, bag_size: 58267\n",
      "batch 539, loss: 0.0033, instance_loss: 0.0014, weighted_loss: 0.0027, label: 1, bag_size: 70867\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 8, train_loss: 0.1369, train_clustering_loss:  0.0057, train_error: 0.0438\n",
      "class 0: acc 0.9538461538461539, correct 186/195\n",
      "class 1: acc 0.9354838709677419, correct 174/186\n",
      "class 2: acc 0.9820359281437125, correct 164/167\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4505, val_error: 0.1148, auc: 0.9671\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.8571428571428571, correct 12/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0002, instance_loss: 0.0015, weighted_loss: 0.0006, label: 0, bag_size: 23753\n",
      "batch 39, loss: 0.0002, instance_loss: 0.0007, weighted_loss: 0.0003, label: 1, bag_size: 76028\n",
      "batch 59, loss: 0.0023, instance_loss: 0.0066, weighted_loss: 0.0036, label: 1, bag_size: 47400\n",
      "batch 79, loss: 0.0001, instance_loss: 0.0028, weighted_loss: 0.0009, label: 1, bag_size: 2939\n",
      "batch 99, loss: 0.1176, instance_loss: 0.0010, weighted_loss: 0.0826, label: 2, bag_size: 51554\n",
      "batch 119, loss: 0.0015, instance_loss: 0.0073, weighted_loss: 0.0032, label: 2, bag_size: 84436\n",
      "batch 139, loss: 0.0064, instance_loss: 0.0089, weighted_loss: 0.0072, label: 2, bag_size: 51017\n",
      "batch 159, loss: 0.0015, instance_loss: 0.0082, weighted_loss: 0.0035, label: 0, bag_size: 14481\n",
      "batch 179, loss: 0.2284, instance_loss: 0.0029, weighted_loss: 0.1608, label: 0, bag_size: 67991\n",
      "batch 199, loss: 0.0822, instance_loss: 0.0026, weighted_loss: 0.0583, label: 1, bag_size: 42997\n",
      "batch 219, loss: 0.0030, instance_loss: 0.0021, weighted_loss: 0.0027, label: 1, bag_size: 7667\n",
      "batch 239, loss: 1.7291, instance_loss: 0.0056, weighted_loss: 1.2121, label: 0, bag_size: 50515\n",
      "batch 259, loss: 0.0131, instance_loss: 0.0026, weighted_loss: 0.0099, label: 2, bag_size: 71567\n",
      "batch 279, loss: 0.0119, instance_loss: 0.0036, weighted_loss: 0.0094, label: 2, bag_size: 28252\n",
      "batch 299, loss: 0.0022, instance_loss: 0.0015, weighted_loss: 0.0020, label: 0, bag_size: 43598\n",
      "batch 319, loss: 0.3942, instance_loss: 0.0115, weighted_loss: 0.2794, label: 1, bag_size: 39620\n",
      "batch 339, loss: 0.0009, instance_loss: 0.0004, weighted_loss: 0.0007, label: 0, bag_size: 76402\n",
      "batch 359, loss: 0.1370, instance_loss: 0.0033, weighted_loss: 0.0969, label: 1, bag_size: 10151\n",
      "batch 379, loss: 0.0063, instance_loss: 0.0080, weighted_loss: 0.0068, label: 1, bag_size: 27124\n",
      "batch 399, loss: 0.0094, instance_loss: 0.0000, weighted_loss: 0.0066, label: 1, bag_size: 55668\n",
      "batch 419, loss: 0.0224, instance_loss: 0.0046, weighted_loss: 0.0170, label: 1, bag_size: 7667\n",
      "batch 439, loss: 0.0064, instance_loss: 0.0030, weighted_loss: 0.0054, label: 1, bag_size: 63030\n",
      "batch 459, loss: 0.0250, instance_loss: 0.0018, weighted_loss: 0.0180, label: 2, bag_size: 54040\n",
      "batch 479, loss: 0.0014, instance_loss: 0.0043, weighted_loss: 0.0023, label: 1, bag_size: 44591\n",
      "batch 499, loss: 0.0056, instance_loss: 0.0016, weighted_loss: 0.0044, label: 1, bag_size: 46625\n",
      "batch 519, loss: 0.0080, instance_loss: 0.0050, weighted_loss: 0.0071, label: 2, bag_size: 49458\n",
      "batch 539, loss: 0.0170, instance_loss: 0.0222, weighted_loss: 0.0186, label: 1, bag_size: 21416\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 9, train_loss: 0.1344, train_clustering_loss:  0.0054, train_error: 0.0310\n",
      "class 0: acc 0.95, correct 171/180\n",
      "class 1: acc 0.96875, correct 186/192\n",
      "class 2: acc 0.9886363636363636, correct 174/176\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.8013, val_error: 0.1639, auc: 0.9492\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.8648648648648649, correct 32/37\n",
      "class 1: acc 0.6428571428571429, correct 9/14\n",
      "class 2: acc 1.0, correct 10/10\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0068, instance_loss: 0.0094, weighted_loss: 0.0076, label: 1, bag_size: 50027\n",
      "batch 39, loss: 0.0149, instance_loss: 0.0123, weighted_loss: 0.0141, label: 0, bag_size: 11462\n",
      "batch 59, loss: 0.0020, instance_loss: 0.0002, weighted_loss: 0.0015, label: 0, bag_size: 78425\n",
      "batch 79, loss: 0.0047, instance_loss: 0.0032, weighted_loss: 0.0043, label: 1, bag_size: 81168\n",
      "batch 99, loss: 0.6639, instance_loss: 0.0029, weighted_loss: 0.4656, label: 0, bag_size: 99876\n",
      "batch 119, loss: 0.2490, instance_loss: 0.0039, weighted_loss: 0.1754, label: 0, bag_size: 83638\n",
      "batch 139, loss: 0.0010, instance_loss: 0.0012, weighted_loss: 0.0011, label: 2, bag_size: 79373\n",
      "batch 159, loss: 0.0146, instance_loss: 0.0051, weighted_loss: 0.0118, label: 1, bag_size: 7667\n",
      "batch 179, loss: 0.0074, instance_loss: 0.0196, weighted_loss: 0.0110, label: 1, bag_size: 115202\n",
      "batch 199, loss: 1.1251, instance_loss: 0.0042, weighted_loss: 0.7888, label: 1, bag_size: 72833\n",
      "batch 219, loss: 0.0009, instance_loss: 0.0078, weighted_loss: 0.0029, label: 1, bag_size: 46798\n",
      "batch 239, loss: 0.0850, instance_loss: 0.0010, weighted_loss: 0.0598, label: 2, bag_size: 87135\n",
      "batch 259, loss: 1.5025, instance_loss: 0.0269, weighted_loss: 1.0598, label: 1, bag_size: 51926\n",
      "batch 279, loss: 0.0079, instance_loss: 0.0101, weighted_loss: 0.0085, label: 2, bag_size: 16676\n",
      "batch 299, loss: 0.0039, instance_loss: 0.0062, weighted_loss: 0.0046, label: 2, bag_size: 49458\n",
      "batch 319, loss: 0.8878, instance_loss: 0.0174, weighted_loss: 0.6267, label: 0, bag_size: 7318\n",
      "batch 339, loss: 0.0007, instance_loss: 0.0066, weighted_loss: 0.0025, label: 2, bag_size: 68187\n",
      "batch 359, loss: 0.0464, instance_loss: 0.0107, weighted_loss: 0.0357, label: 0, bag_size: 11566\n",
      "batch 379, loss: 0.0628, instance_loss: 0.0042, weighted_loss: 0.0452, label: 1, bag_size: 81168\n",
      "batch 399, loss: 0.0162, instance_loss: 0.0015, weighted_loss: 0.0118, label: 2, bag_size: 55626\n",
      "batch 419, loss: 0.0003, instance_loss: 0.0168, weighted_loss: 0.0052, label: 2, bag_size: 40315\n",
      "batch 439, loss: 0.0442, instance_loss: 0.0019, weighted_loss: 0.0315, label: 1, bag_size: 91540\n",
      "batch 459, loss: 0.0328, instance_loss: 0.0010, weighted_loss: 0.0232, label: 1, bag_size: 24044\n",
      "batch 479, loss: 0.3171, instance_loss: 0.0053, weighted_loss: 0.2236, label: 0, bag_size: 16184\n",
      "batch 499, loss: 0.0049, instance_loss: 0.0131, weighted_loss: 0.0074, label: 2, bag_size: 76037\n",
      "batch 519, loss: 0.0607, instance_loss: 0.0067, weighted_loss: 0.0445, label: 0, bag_size: 33045\n",
      "batch 539, loss: 0.0085, instance_loss: 0.0053, weighted_loss: 0.0075, label: 2, bag_size: 47603\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 10, train_loss: 0.1985, train_clustering_loss:  0.0068, train_error: 0.0821\n",
      "class 0: acc 0.8807947019867549, correct 133/151\n",
      "class 1: acc 0.8888888888888888, correct 176/198\n",
      "class 2: acc 0.9748743718592965, correct 194/199\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5246, val_error: 0.1475, auc: 0.9535\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8108108108108109, correct 30/37\n",
      "class 1: acc 0.9285714285714286, correct 13/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 3 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0013, instance_loss: 0.0027, weighted_loss: 0.0017, label: 0, bag_size: 53706\n",
      "batch 39, loss: 0.0031, instance_loss: 0.0028, weighted_loss: 0.0030, label: 0, bag_size: 38922\n",
      "batch 59, loss: 0.0700, instance_loss: 0.0039, weighted_loss: 0.0502, label: 2, bag_size: 5173\n",
      "batch 79, loss: 0.0012, instance_loss: 0.0021, weighted_loss: 0.0014, label: 0, bag_size: 89352\n",
      "batch 99, loss: 0.0034, instance_loss: 0.0020, weighted_loss: 0.0030, label: 0, bag_size: 44730\n",
      "batch 119, loss: 0.0161, instance_loss: 0.0000, weighted_loss: 0.0113, label: 2, bag_size: 103823\n",
      "batch 139, loss: 0.0002, instance_loss: 0.0033, weighted_loss: 0.0011, label: 2, bag_size: 69002\n",
      "batch 159, loss: 0.2798, instance_loss: 0.0098, weighted_loss: 0.1988, label: 1, bag_size: 27666\n",
      "batch 179, loss: 0.0026, instance_loss: 0.0006, weighted_loss: 0.0020, label: 0, bag_size: 71332\n",
      "batch 199, loss: 0.0129, instance_loss: 0.0009, weighted_loss: 0.0093, label: 1, bag_size: 77740\n",
      "batch 219, loss: 0.0097, instance_loss: 0.0000, weighted_loss: 0.0068, label: 2, bag_size: 103823\n",
      "batch 239, loss: 0.0073, instance_loss: 0.0025, weighted_loss: 0.0059, label: 2, bag_size: 64675\n",
      "batch 259, loss: 0.0015, instance_loss: 0.0054, weighted_loss: 0.0027, label: 2, bag_size: 30615\n",
      "batch 279, loss: 0.5616, instance_loss: 0.0019, weighted_loss: 0.3937, label: 1, bag_size: 39770\n",
      "batch 299, loss: 0.0067, instance_loss: 0.0046, weighted_loss: 0.0061, label: 0, bag_size: 83638\n",
      "batch 319, loss: 0.0079, instance_loss: 0.0040, weighted_loss: 0.0067, label: 2, bag_size: 82484\n",
      "batch 339, loss: 0.0272, instance_loss: 0.0075, weighted_loss: 0.0213, label: 2, bag_size: 51316\n",
      "batch 359, loss: 0.0005, instance_loss: 0.0024, weighted_loss: 0.0011, label: 0, bag_size: 23753\n",
      "batch 379, loss: 0.0137, instance_loss: 0.0036, weighted_loss: 0.0107, label: 2, bag_size: 39508\n",
      "batch 399, loss: 0.0561, instance_loss: 0.0020, weighted_loss: 0.0398, label: 0, bag_size: 88728\n",
      "batch 419, loss: 0.0029, instance_loss: 0.0021, weighted_loss: 0.0026, label: 0, bag_size: 67411\n",
      "batch 439, loss: 0.0065, instance_loss: 0.0056, weighted_loss: 0.0063, label: 1, bag_size: 66357\n",
      "batch 459, loss: 0.0607, instance_loss: 0.0017, weighted_loss: 0.0430, label: 2, bag_size: 87135\n",
      "batch 479, loss: 0.0364, instance_loss: 0.0011, weighted_loss: 0.0259, label: 1, bag_size: 33537\n",
      "batch 499, loss: 0.0966, instance_loss: 0.0020, weighted_loss: 0.0682, label: 0, bag_size: 22840\n",
      "batch 519, loss: 0.0002, instance_loss: 0.0030, weighted_loss: 0.0010, label: 2, bag_size: 66345\n",
      "batch 539, loss: 0.0020, instance_loss: 0.0039, weighted_loss: 0.0026, label: 2, bag_size: 82484\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 11, train_loss: 0.1364, train_clustering_loss:  0.0059, train_error: 0.0456\n",
      "class 0: acc 0.9278350515463918, correct 180/194\n",
      "class 1: acc 0.9506172839506173, correct 154/162\n",
      "class 2: acc 0.984375, correct 189/192\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5089, val_error: 0.1803, auc: 0.9573\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.7837837837837838, correct 29/37\n",
      "class 1: acc 0.8571428571428571, correct 12/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 4 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0093, instance_loss: 0.0087, weighted_loss: 0.0091, label: 2, bag_size: 30615\n",
      "batch 39, loss: 0.0443, instance_loss: 0.0039, weighted_loss: 0.0322, label: 0, bag_size: 52270\n",
      "batch 59, loss: 0.6381, instance_loss: 0.0026, weighted_loss: 0.4474, label: 1, bag_size: 45812\n",
      "batch 79, loss: 0.0024, instance_loss: 0.0090, weighted_loss: 0.0044, label: 0, bag_size: 84361\n",
      "batch 99, loss: 0.0065, instance_loss: 0.0010, weighted_loss: 0.0049, label: 0, bag_size: 86995\n",
      "batch 119, loss: 0.0800, instance_loss: 0.0011, weighted_loss: 0.0563, label: 2, bag_size: 91267\n",
      "batch 139, loss: 0.0090, instance_loss: 0.0032, weighted_loss: 0.0073, label: 2, bag_size: 73189\n",
      "batch 159, loss: 0.0008, instance_loss: 0.0020, weighted_loss: 0.0011, label: 1, bag_size: 45270\n",
      "batch 179, loss: 0.0001, instance_loss: 0.0009, weighted_loss: 0.0003, label: 0, bag_size: 53706\n",
      "batch 199, loss: 0.0093, instance_loss: 0.0049, weighted_loss: 0.0079, label: 2, bag_size: 75833\n",
      "batch 219, loss: 0.0002, instance_loss: 0.0026, weighted_loss: 0.0009, label: 2, bag_size: 51251\n",
      "batch 239, loss: 0.0039, instance_loss: 0.0025, weighted_loss: 0.0035, label: 0, bag_size: 77113\n",
      "batch 259, loss: 0.0007, instance_loss: 0.0008, weighted_loss: 0.0007, label: 1, bag_size: 61109\n",
      "batch 279, loss: 0.4504, instance_loss: 0.0141, weighted_loss: 0.3195, label: 2, bag_size: 23841\n",
      "batch 299, loss: 0.0041, instance_loss: 0.0019, weighted_loss: 0.0034, label: 1, bag_size: 76028\n",
      "batch 319, loss: 0.0275, instance_loss: 0.0030, weighted_loss: 0.0202, label: 0, bag_size: 77628\n",
      "batch 339, loss: 0.0002, instance_loss: 0.0051, weighted_loss: 0.0017, label: 1, bag_size: 28713\n",
      "batch 359, loss: 0.0396, instance_loss: 0.0033, weighted_loss: 0.0287, label: 1, bag_size: 35189\n",
      "batch 379, loss: 0.0004, instance_loss: 0.0033, weighted_loss: 0.0013, label: 2, bag_size: 66345\n",
      "batch 399, loss: 0.2132, instance_loss: 0.0006, weighted_loss: 0.1495, label: 0, bag_size: 57545\n",
      "batch 419, loss: 0.0001, instance_loss: 0.0050, weighted_loss: 0.0016, label: 2, bag_size: 68187\n",
      "batch 439, loss: 0.0070, instance_loss: 0.0042, weighted_loss: 0.0061, label: 1, bag_size: 119731\n",
      "batch 459, loss: 0.0017, instance_loss: 0.0119, weighted_loss: 0.0047, label: 2, bag_size: 84436\n",
      "batch 479, loss: 0.0010, instance_loss: 0.0056, weighted_loss: 0.0023, label: 1, bag_size: 7199\n",
      "batch 499, loss: 0.0142, instance_loss: 0.0004, weighted_loss: 0.0100, label: 0, bag_size: 79139\n",
      "batch 519, loss: 0.1007, instance_loss: 0.0044, weighted_loss: 0.0718, label: 0, bag_size: 59830\n",
      "batch 539, loss: 0.2936, instance_loss: 0.0018, weighted_loss: 0.2061, label: 0, bag_size: 56229\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 12, train_loss: 0.1410, train_clustering_loss:  0.0063, train_error: 0.0493\n",
      "class 0: acc 0.9428571428571428, correct 198/210\n",
      "class 1: acc 0.9226190476190477, correct 155/168\n",
      "class 2: acc 0.9882352941176471, correct 168/170\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4826, val_error: 0.1639, auc: 0.9573\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.8378378378378378, correct 31/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 5 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1723, instance_loss: 0.0034, weighted_loss: 0.1217, label: 2, bag_size: 55626\n",
      "batch 39, loss: 0.0016, instance_loss: 0.0014, weighted_loss: 0.0015, label: 0, bag_size: 43067\n",
      "batch 59, loss: 0.0036, instance_loss: 0.0028, weighted_loss: 0.0034, label: 2, bag_size: 50045\n",
      "batch 79, loss: 0.0050, instance_loss: 0.0079, weighted_loss: 0.0059, label: 1, bag_size: 27124\n",
      "batch 99, loss: 0.0301, instance_loss: 0.0053, weighted_loss: 0.0227, label: 0, bag_size: 30263\n",
      "batch 119, loss: 0.0298, instance_loss: 0.0002, weighted_loss: 0.0209, label: 2, bag_size: 103823\n",
      "batch 139, loss: 1.6662, instance_loss: 0.0173, weighted_loss: 1.1716, label: 2, bag_size: 52584\n",
      "batch 159, loss: 0.0031, instance_loss: 0.0147, weighted_loss: 0.0066, label: 0, bag_size: 53181\n",
      "batch 179, loss: 0.7132, instance_loss: 0.0037, weighted_loss: 0.5004, label: 1, bag_size: 20115\n",
      "batch 199, loss: 0.0152, instance_loss: 0.0023, weighted_loss: 0.0113, label: 1, bag_size: 65195\n",
      "batch 219, loss: 0.0000, instance_loss: 0.0015, weighted_loss: 0.0005, label: 0, bag_size: 54512\n",
      "batch 239, loss: 0.0760, instance_loss: 0.0033, weighted_loss: 0.0542, label: 2, bag_size: 23841\n",
      "batch 259, loss: 0.0135, instance_loss: 0.0000, weighted_loss: 0.0095, label: 0, bag_size: 78345\n",
      "batch 279, loss: 0.0849, instance_loss: 0.0011, weighted_loss: 0.0597, label: 0, bag_size: 82846\n",
      "batch 299, loss: 0.0017, instance_loss: 0.0058, weighted_loss: 0.0030, label: 2, bag_size: 84436\n",
      "batch 319, loss: 0.0057, instance_loss: 0.0022, weighted_loss: 0.0046, label: 1, bag_size: 10520\n",
      "batch 339, loss: 0.0046, instance_loss: 0.0068, weighted_loss: 0.0053, label: 2, bag_size: 39234\n",
      "batch 359, loss: 0.0008, instance_loss: 0.0007, weighted_loss: 0.0008, label: 1, bag_size: 57799\n",
      "batch 379, loss: 0.5760, instance_loss: 0.0145, weighted_loss: 0.4075, label: 1, bag_size: 30632\n",
      "batch 399, loss: 0.0571, instance_loss: 0.0047, weighted_loss: 0.0414, label: 1, bag_size: 20115\n",
      "batch 419, loss: 0.0025, instance_loss: 0.0044, weighted_loss: 0.0031, label: 1, bag_size: 51100\n",
      "batch 439, loss: 0.0190, instance_loss: 0.0003, weighted_loss: 0.0134, label: 1, bag_size: 54288\n",
      "batch 459, loss: 0.9468, instance_loss: 0.0086, weighted_loss: 0.6653, label: 0, bag_size: 91611\n",
      "batch 479, loss: 0.0169, instance_loss: 0.0000, weighted_loss: 0.0119, label: 1, bag_size: 61135\n",
      "batch 499, loss: 0.0119, instance_loss: 0.0045, weighted_loss: 0.0097, label: 1, bag_size: 114407\n",
      "batch 519, loss: 0.0017, instance_loss: 0.0035, weighted_loss: 0.0022, label: 2, bag_size: 73189\n",
      "batch 539, loss: 0.1629, instance_loss: 0.0793, weighted_loss: 0.1378, label: 0, bag_size: 84026\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 13, train_loss: 0.1640, train_clustering_loss:  0.0069, train_error: 0.0602\n",
      "class 0: acc 0.9065934065934066, correct 165/182\n",
      "class 1: acc 0.9303482587064676, correct 187/201\n",
      "class 2: acc 0.9878787878787879, correct 163/165\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5441, val_error: 0.1803, auc: 0.9522\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8378378378378378, correct 31/37\n",
      "class 1: acc 0.7142857142857143, correct 10/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 6 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1762, instance_loss: 0.0220, weighted_loss: 0.1300, label: 0, bag_size: 20672\n",
      "batch 39, loss: 0.1958, instance_loss: 0.0011, weighted_loss: 0.1374, label: 0, bag_size: 85588\n",
      "batch 59, loss: 0.0012, instance_loss: 0.0006, weighted_loss: 0.0010, label: 0, bag_size: 18166\n",
      "batch 79, loss: 0.0008, instance_loss: 0.0084, weighted_loss: 0.0031, label: 2, bag_size: 68187\n",
      "batch 99, loss: 0.0077, instance_loss: 0.0016, weighted_loss: 0.0059, label: 2, bag_size: 91267\n",
      "batch 119, loss: 0.0501, instance_loss: 0.0011, weighted_loss: 0.0354, label: 1, bag_size: 29399\n",
      "batch 139, loss: 0.0016, instance_loss: 0.0040, weighted_loss: 0.0023, label: 1, bag_size: 51100\n",
      "batch 159, loss: 0.0130, instance_loss: 0.0015, weighted_loss: 0.0095, label: 2, bag_size: 39508\n",
      "batch 179, loss: 0.0007, instance_loss: 0.0052, weighted_loss: 0.0021, label: 1, bag_size: 11509\n",
      "batch 199, loss: 0.0098, instance_loss: 0.0011, weighted_loss: 0.0072, label: 0, bag_size: 85831\n",
      "batch 219, loss: 0.0403, instance_loss: 0.0039, weighted_loss: 0.0294, label: 2, bag_size: 15486\n",
      "batch 239, loss: 0.0037, instance_loss: 0.0002, weighted_loss: 0.0026, label: 2, bag_size: 103823\n",
      "batch 259, loss: 1.5555, instance_loss: 0.0017, weighted_loss: 1.0894, label: 2, bag_size: 64675\n",
      "batch 279, loss: 0.0005, instance_loss: 0.0012, weighted_loss: 0.0007, label: 2, bag_size: 79690\n",
      "batch 299, loss: 0.0000, instance_loss: 0.0022, weighted_loss: 0.0007, label: 0, bag_size: 78844\n",
      "batch 319, loss: 0.1178, instance_loss: 0.0052, weighted_loss: 0.0840, label: 1, bag_size: 81168\n",
      "batch 339, loss: 0.1266, instance_loss: 0.0035, weighted_loss: 0.0897, label: 0, bag_size: 53043\n",
      "batch 359, loss: 0.0358, instance_loss: 0.0096, weighted_loss: 0.0279, label: 2, bag_size: 19057\n",
      "batch 379, loss: 2.9348, instance_loss: 0.0024, weighted_loss: 2.0551, label: 1, bag_size: 96719\n",
      "batch 399, loss: 0.5389, instance_loss: 0.0042, weighted_loss: 0.3785, label: 2, bag_size: 15486\n",
      "batch 419, loss: 0.0419, instance_loss: 0.0040, weighted_loss: 0.0305, label: 0, bag_size: 58349\n",
      "batch 439, loss: 0.0323, instance_loss: 0.0013, weighted_loss: 0.0230, label: 2, bag_size: 50978\n",
      "batch 459, loss: 0.0147, instance_loss: 0.0077, weighted_loss: 0.0126, label: 2, bag_size: 5173\n",
      "batch 479, loss: 0.0060, instance_loss: 0.0011, weighted_loss: 0.0045, label: 0, bag_size: 90654\n",
      "batch 499, loss: 0.0019, instance_loss: 0.0006, weighted_loss: 0.0015, label: 1, bag_size: 32084\n",
      "batch 519, loss: 0.0065, instance_loss: 0.0044, weighted_loss: 0.0059, label: 0, bag_size: 61195\n",
      "batch 539, loss: 0.0065, instance_loss: 0.0136, weighted_loss: 0.0086, label: 2, bag_size: 11119\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 14, train_loss: 0.1276, train_clustering_loss:  0.0044, train_error: 0.0474\n",
      "class 0: acc 0.9152542372881356, correct 162/177\n",
      "class 1: acc 0.9447513812154696, correct 171/181\n",
      "class 2: acc 0.9947368421052631, correct 189/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4428, val_error: 0.1148, auc: 0.9606\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 7 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.5408, instance_loss: 0.0078, weighted_loss: 0.3809, label: 2, bag_size: 15486\n",
      "batch 39, loss: 0.0050, instance_loss: 0.0050, weighted_loss: 0.0050, label: 2, bag_size: 54265\n",
      "batch 59, loss: 0.4392, instance_loss: 0.0021, weighted_loss: 0.3081, label: 0, bag_size: 99876\n",
      "batch 79, loss: 0.1127, instance_loss: 0.0015, weighted_loss: 0.0793, label: 1, bag_size: 36784\n",
      "batch 99, loss: 0.0133, instance_loss: 0.0015, weighted_loss: 0.0098, label: 0, bag_size: 65592\n",
      "batch 119, loss: 0.0001, instance_loss: 0.0019, weighted_loss: 0.0007, label: 0, bag_size: 53706\n",
      "batch 139, loss: 0.2891, instance_loss: 0.0071, weighted_loss: 0.2045, label: 2, bag_size: 12043\n",
      "batch 159, loss: 0.0025, instance_loss: 0.0023, weighted_loss: 0.0024, label: 2, bag_size: 73189\n",
      "batch 179, loss: 0.0050, instance_loss: 0.0027, weighted_loss: 0.0043, label: 2, bag_size: 69002\n",
      "batch 199, loss: 0.0065, instance_loss: 0.0010, weighted_loss: 0.0049, label: 0, bag_size: 89230\n",
      "batch 219, loss: 0.0030, instance_loss: 0.0062, weighted_loss: 0.0040, label: 2, bag_size: 84436\n",
      "batch 239, loss: 0.0007, instance_loss: 0.0047, weighted_loss: 0.0019, label: 1, bag_size: 113716\n",
      "batch 259, loss: 0.0003, instance_loss: 0.0008, weighted_loss: 0.0005, label: 0, bag_size: 64092\n",
      "batch 279, loss: 0.0028, instance_loss: 0.0000, weighted_loss: 0.0020, label: 0, bag_size: 75202\n",
      "batch 299, loss: 1.8656, instance_loss: 0.0044, weighted_loss: 1.3073, label: 1, bag_size: 73500\n",
      "batch 319, loss: 0.0097, instance_loss: 0.0002, weighted_loss: 0.0068, label: 2, bag_size: 103823\n",
      "batch 339, loss: 0.1266, instance_loss: 0.0052, weighted_loss: 0.0902, label: 2, bag_size: 76843\n",
      "batch 359, loss: 0.1144, instance_loss: 0.0035, weighted_loss: 0.0811, label: 1, bag_size: 50027\n",
      "batch 379, loss: 0.0431, instance_loss: 0.0005, weighted_loss: 0.0303, label: 2, bag_size: 64675\n",
      "batch 399, loss: 0.1712, instance_loss: 0.0021, weighted_loss: 0.1205, label: 1, bag_size: 35236\n",
      "batch 419, loss: 0.1159, instance_loss: 0.0028, weighted_loss: 0.0820, label: 0, bag_size: 50370\n",
      "batch 439, loss: 0.0014, instance_loss: 0.0010, weighted_loss: 0.0013, label: 0, bag_size: 70900\n",
      "batch 459, loss: 0.0004, instance_loss: 0.0060, weighted_loss: 0.0021, label: 2, bag_size: 47603\n",
      "batch 479, loss: 0.0081, instance_loss: 0.0027, weighted_loss: 0.0065, label: 1, bag_size: 24044\n",
      "batch 499, loss: 0.0108, instance_loss: 0.0022, weighted_loss: 0.0082, label: 0, bag_size: 21171\n",
      "batch 519, loss: 0.4628, instance_loss: 0.0552, weighted_loss: 0.3405, label: 0, bag_size: 29652\n",
      "batch 539, loss: 0.0309, instance_loss: 0.0044, weighted_loss: 0.0230, label: 2, bag_size: 11415\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 15, train_loss: 0.1357, train_clustering_loss:  0.0061, train_error: 0.0456\n",
      "class 0: acc 0.9421965317919075, correct 163/173\n",
      "class 1: acc 0.9261363636363636, correct 163/176\n",
      "class 2: acc 0.9899497487437185, correct 197/199\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4073, val_error: 0.1148, auc: 0.9635\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.8571428571428571, correct 12/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 8 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.2346, instance_loss: 0.0005, weighted_loss: 0.1644, label: 0, bag_size: 38129\n",
      "batch 39, loss: 0.0775, instance_loss: 0.0031, weighted_loss: 0.0552, label: 1, bag_size: 91540\n",
      "batch 59, loss: 0.1223, instance_loss: 0.0014, weighted_loss: 0.0861, label: 1, bag_size: 18108\n",
      "batch 79, loss: 0.0008, instance_loss: 0.0058, weighted_loss: 0.0023, label: 2, bag_size: 75833\n",
      "batch 99, loss: 0.0179, instance_loss: 0.0040, weighted_loss: 0.0137, label: 2, bag_size: 48881\n",
      "batch 119, loss: 0.0016, instance_loss: 0.0015, weighted_loss: 0.0016, label: 0, bag_size: 78844\n",
      "batch 139, loss: 0.0396, instance_loss: 0.0039, weighted_loss: 0.0289, label: 2, bag_size: 11415\n",
      "batch 159, loss: 0.0551, instance_loss: 0.0090, weighted_loss: 0.0413, label: 1, bag_size: 56911\n",
      "batch 179, loss: 0.1267, instance_loss: 0.0298, weighted_loss: 0.0977, label: 1, bag_size: 115202\n",
      "batch 199, loss: 0.0134, instance_loss: 0.0065, weighted_loss: 0.0113, label: 0, bag_size: 30263\n",
      "batch 219, loss: 0.0010, instance_loss: 0.0033, weighted_loss: 0.0017, label: 2, bag_size: 95428\n",
      "batch 239, loss: 0.0006, instance_loss: 0.0022, weighted_loss: 0.0010, label: 2, bag_size: 84436\n",
      "batch 259, loss: 0.0155, instance_loss: 0.0022, weighted_loss: 0.0115, label: 1, bag_size: 24044\n",
      "batch 279, loss: 0.0067, instance_loss: 0.0012, weighted_loss: 0.0051, label: 0, bag_size: 76103\n",
      "batch 299, loss: 0.5893, instance_loss: 0.0167, weighted_loss: 0.4175, label: 1, bag_size: 50909\n",
      "batch 319, loss: 0.0041, instance_loss: 0.0030, weighted_loss: 0.0038, label: 2, bag_size: 64675\n",
      "batch 339, loss: 0.0764, instance_loss: 0.0029, weighted_loss: 0.0543, label: 0, bag_size: 76103\n",
      "batch 359, loss: 0.0224, instance_loss: 0.0009, weighted_loss: 0.0160, label: 1, bag_size: 45070\n",
      "batch 379, loss: 0.0018, instance_loss: 0.0037, weighted_loss: 0.0023, label: 0, bag_size: 77113\n",
      "batch 399, loss: 0.0480, instance_loss: 0.0071, weighted_loss: 0.0357, label: 1, bag_size: 16596\n",
      "batch 419, loss: 0.0020, instance_loss: 0.0028, weighted_loss: 0.0022, label: 0, bag_size: 88181\n",
      "batch 439, loss: 0.0022, instance_loss: 0.0030, weighted_loss: 0.0025, label: 0, bag_size: 53184\n",
      "batch 459, loss: 0.4728, instance_loss: 0.0062, weighted_loss: 0.3328, label: 2, bag_size: 76843\n",
      "batch 479, loss: 0.0444, instance_loss: 0.0034, weighted_loss: 0.0321, label: 1, bag_size: 8400\n",
      "batch 499, loss: 0.0000, instance_loss: 0.0056, weighted_loss: 0.0017, label: 1, bag_size: 2939\n",
      "batch 519, loss: 0.0027, instance_loss: 0.0027, weighted_loss: 0.0027, label: 0, bag_size: 59101\n",
      "batch 539, loss: 0.0001, instance_loss: 0.0072, weighted_loss: 0.0022, label: 2, bag_size: 36978\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 16, train_loss: 0.1544, train_clustering_loss:  0.0057, train_error: 0.0657\n",
      "class 0: acc 0.9257425742574258, correct 187/202\n",
      "class 1: acc 0.9221556886227545, correct 154/167\n",
      "class 2: acc 0.9553072625698324, correct 171/179\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4607, val_error: 0.0984, auc: 0.9618\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7857142857142857, correct 11/14\n",
      "class 2: acc 1.0, correct 10/10\n",
      "EarlyStopping counter: 9 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.2653, instance_loss: 0.0039, weighted_loss: 0.1869, label: 0, bag_size: 22698\n",
      "batch 39, loss: 0.0892, instance_loss: 0.0027, weighted_loss: 0.0633, label: 1, bag_size: 72255\n",
      "batch 59, loss: 0.0132, instance_loss: 0.0118, weighted_loss: 0.0128, label: 2, bag_size: 63921\n",
      "batch 79, loss: 0.0006, instance_loss: 0.0022, weighted_loss: 0.0011, label: 0, bag_size: 77221\n",
      "batch 99, loss: 0.0185, instance_loss: 0.0011, weighted_loss: 0.0133, label: 0, bag_size: 82846\n",
      "batch 119, loss: 0.6974, instance_loss: 0.0021, weighted_loss: 0.4888, label: 0, bag_size: 11284\n",
      "batch 139, loss: 0.9756, instance_loss: 0.0022, weighted_loss: 0.6836, label: 1, bag_size: 60865\n",
      "batch 159, loss: 0.2265, instance_loss: 0.0035, weighted_loss: 0.1596, label: 0, bag_size: 52270\n",
      "batch 179, loss: 0.0553, instance_loss: 0.0014, weighted_loss: 0.0392, label: 2, bag_size: 51554\n",
      "batch 199, loss: 0.0001, instance_loss: 0.0023, weighted_loss: 0.0007, label: 1, bag_size: 92385\n",
      "batch 219, loss: 0.0159, instance_loss: 0.0005, weighted_loss: 0.0112, label: 1, bag_size: 78482\n",
      "batch 239, loss: 0.0290, instance_loss: 0.0010, weighted_loss: 0.0206, label: 0, bag_size: 67598\n",
      "batch 259, loss: 0.0003, instance_loss: 0.0018, weighted_loss: 0.0008, label: 2, bag_size: 72966\n",
      "batch 279, loss: 1.6241, instance_loss: 0.0020, weighted_loss: 1.1374, label: 0, bag_size: 38129\n",
      "batch 299, loss: 0.0011, instance_loss: 0.0023, weighted_loss: 0.0014, label: 0, bag_size: 9321\n",
      "batch 319, loss: 0.0076, instance_loss: 0.0021, weighted_loss: 0.0060, label: 0, bag_size: 38922\n",
      "batch 339, loss: 0.0234, instance_loss: 0.0132, weighted_loss: 0.0203, label: 2, bag_size: 16676\n",
      "batch 359, loss: 0.2243, instance_loss: 0.0009, weighted_loss: 0.1573, label: 1, bag_size: 72255\n",
      "batch 379, loss: 0.3627, instance_loss: 0.0009, weighted_loss: 0.2542, label: 1, bag_size: 109684\n",
      "batch 399, loss: 0.0916, instance_loss: 0.0015, weighted_loss: 0.0646, label: 1, bag_size: 92502\n",
      "batch 419, loss: 0.0011, instance_loss: 0.0031, weighted_loss: 0.0017, label: 2, bag_size: 68187\n",
      "batch 439, loss: 0.0003, instance_loss: 0.0027, weighted_loss: 0.0010, label: 2, bag_size: 46661\n",
      "batch 459, loss: 0.1001, instance_loss: 0.0087, weighted_loss: 0.0727, label: 0, bag_size: 7302\n",
      "batch 479, loss: 0.1081, instance_loss: 0.0023, weighted_loss: 0.0764, label: 1, bag_size: 108492\n",
      "batch 499, loss: 0.0002, instance_loss: 0.0002, weighted_loss: 0.0002, label: 2, bag_size: 50045\n",
      "batch 519, loss: 0.1574, instance_loss: 0.0011, weighted_loss: 0.1105, label: 1, bag_size: 38177\n",
      "batch 539, loss: 0.0007, instance_loss: 0.0011, weighted_loss: 0.0008, label: 1, bag_size: 67008\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 17, train_loss: 0.1354, train_clustering_loss:  0.0057, train_error: 0.0547\n",
      "class 0: acc 0.9239130434782609, correct 170/184\n",
      "class 1: acc 0.9226519337016574, correct 167/181\n",
      "class 2: acc 0.9890710382513661, correct 181/183\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4808, val_error: 0.0984, auc: 0.9614\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.9285714285714286, correct 13/14\n",
      "class 2: acc 0.9, correct 9/10\n",
      "EarlyStopping counter: 10 out of 10\n",
      "Early stopping\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9684684684684686\n",
      "0.9787234042553191\n",
      "0.9725490196078431\n",
      "Val error: 0.1311, ROC AUC: 0.9732\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9892682926829268\n",
      "0.9844271412680756\n",
      "1.0\n",
      "Test error: 0.0440, ROC AUC: 0.9912\n",
      "class 0: acc 0.94, correct 47/50\n",
      "class 1: acc 0.9655172413793104, correct 28/29\n",
      "class 2: acc 1.0, correct 12/12\n",
      "   Unnamed: 0       case_id  \\\n",
      "0           0  TCGA-4A-A93X   \n",
      "1           1  TCGA-B3-4104   \n",
      "2           2  TCGA-BP-4963   \n",
      "3           3  TCGA-BP-5170   \n",
      "4           4  TCGA-BP-5175   \n",
      "\n",
      "                                            slide_id oncotree_code site   age  \\\n",
      "0  TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A  58.0   \n",
      "1  TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3  75.0   \n",
      "2  TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP  63.0   \n",
      "3  TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP  55.0   \n",
      "4  TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP  60.0   \n",
      "\n",
      "   survival_months  is_female  censorship race label  \n",
      "0            12.81        0.0         1.0    W     1  \n",
      "1            34.46        0.0         1.0    W     1  \n",
      "2            60.25        0.0         1.0    W     0  \n",
      "3            79.24        0.0         1.0    W     0  \n",
      "4            30.62        0.0         1.0    W     0  \n",
      "Traing Data Size ({1.00}): 548 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 61 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 91 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "\n",
      "Training Fold 1!\n",
      "\n",
      "Init train/val/test splits... \n",
      "Done!\n",
      "Training on 548 samples\n",
      "Validating on 61 samples\n",
      "Testing on 91 samples\n",
      "\n",
      "Init loss function... Done!\n",
      "\n",
      "Init Model... Setting tau to 1.0\n",
      "Done!\n",
      "CLAM_SB(\n",
      "  (attention_net): Sequential(\n",
      "    (0): Linear(in_features=384, out_features=384, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Dropout(p=0.25, inplace=False)\n",
      "    (3): Attn_Net_Gated(\n",
      "      (attention_a): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Tanh()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_b): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Sigmoid()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_c): Linear(in_features=256, out_features=1, bias=True)\n",
      "    )\n",
      "  )\n",
      "  (feature_linear1): Linear(in_features=768, out_features=384, bias=True)\n",
      "  (classifiers): Linear(in_features=384, out_features=3, bias=True)\n",
      "  (instance_classifiers): ModuleList(\n",
      "    (0): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (1): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (2): Linear(in_features=384, out_features=2, bias=True)\n",
      "  )\n",
      "  (instance_loss_fn): SmoothTop1SVM()\n",
      ")\n",
      "Total number of parameters: 643978\n",
      "Total number of trainable parameters: 643978\n",
      "\n",
      "Init optimizer ... Done!\n",
      "\n",
      "Init Loaders... Done!\n",
      "\n",
      "Setup EarlyStopping... Done!\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.7950, instance_loss: 1.0997, weighted_loss: 0.8864, label: 2, bag_size: 59224\n",
      "batch 39, loss: 0.8760, instance_loss: 1.2560, weighted_loss: 0.9900, label: 0, bag_size: 23753\n",
      "batch 59, loss: 1.3354, instance_loss: 0.9349, weighted_loss: 1.2152, label: 0, bag_size: 56351\n",
      "batch 79, loss: 0.7565, instance_loss: 0.6359, weighted_loss: 0.7203, label: 0, bag_size: 16262\n",
      "batch 99, loss: 0.4950, instance_loss: 0.9629, weighted_loss: 0.6354, label: 2, bag_size: 51554\n",
      "batch 119, loss: 0.3161, instance_loss: 0.2632, weighted_loss: 0.3003, label: 2, bag_size: 68187\n",
      "batch 139, loss: 0.7150, instance_loss: 0.2287, weighted_loss: 0.5691, label: 0, bag_size: 34548\n",
      "batch 159, loss: 2.0856, instance_loss: 0.1276, weighted_loss: 1.4982, label: 0, bag_size: 21504\n",
      "batch 179, loss: 1.0092, instance_loss: 0.2085, weighted_loss: 0.7690, label: 1, bag_size: 28713\n",
      "batch 199, loss: 0.2009, instance_loss: 0.2670, weighted_loss: 0.2208, label: 2, bag_size: 30615\n",
      "batch 219, loss: 0.5354, instance_loss: 0.0438, weighted_loss: 0.3879, label: 0, bag_size: 77339\n",
      "batch 239, loss: 0.1927, instance_loss: 0.1923, weighted_loss: 0.1926, label: 1, bag_size: 91901\n",
      "batch 259, loss: 0.5856, instance_loss: 0.0739, weighted_loss: 0.4321, label: 2, bag_size: 72966\n",
      "batch 279, loss: 1.3197, instance_loss: 0.0718, weighted_loss: 0.9454, label: 1, bag_size: 96881\n",
      "batch 299, loss: 0.5692, instance_loss: 0.0480, weighted_loss: 0.4129, label: 1, bag_size: 57077\n",
      "batch 319, loss: 0.1376, instance_loss: 0.0164, weighted_loss: 0.1013, label: 2, bag_size: 68187\n",
      "batch 339, loss: 0.2110, instance_loss: 0.0505, weighted_loss: 0.1628, label: 2, bag_size: 66345\n",
      "batch 359, loss: 0.6536, instance_loss: 0.0459, weighted_loss: 0.4713, label: 2, bag_size: 87135\n",
      "batch 379, loss: 0.3019, instance_loss: 0.0157, weighted_loss: 0.2160, label: 2, bag_size: 39508\n",
      "batch 399, loss: 0.3345, instance_loss: 0.0039, weighted_loss: 0.2353, label: 0, bag_size: 61195\n",
      "batch 419, loss: 0.8249, instance_loss: 0.0092, weighted_loss: 0.5802, label: 1, bag_size: 80573\n",
      "batch 439, loss: 0.3975, instance_loss: 0.0242, weighted_loss: 0.2855, label: 1, bag_size: 172802\n",
      "batch 459, loss: 0.7877, instance_loss: 0.0220, weighted_loss: 0.5580, label: 2, bag_size: 64675\n",
      "batch 479, loss: 0.0538, instance_loss: 0.0146, weighted_loss: 0.0420, label: 2, bag_size: 72686\n",
      "batch 499, loss: 0.1494, instance_loss: 0.0148, weighted_loss: 0.1090, label: 2, bag_size: 59224\n",
      "batch 519, loss: 0.0604, instance_loss: 0.0238, weighted_loss: 0.0494, label: 2, bag_size: 34408\n",
      "batch 539, loss: 0.1411, instance_loss: 0.0950, weighted_loss: 0.1273, label: 1, bag_size: 28349\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9370437956204379: correct 4108/4384\n",
      "class 1 clustering acc 0.8911952554744526: correct 3907/4384\n",
      "Epoch: 0, train_loss: 0.6814, train_clustering_loss:  0.3521, train_error: 0.2847\n",
      "class 0: acc 0.6484848484848484, correct 107/165\n",
      "class 1: acc 0.694300518134715, correct 134/193\n",
      "class 2: acc 0.7947368421052632, correct 151/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6601, val_error: 0.2459, auc: 0.9292\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9230769230769231, correct 24/26\n",
      "class 1: acc 0.45454545454545453, correct 10/22\n",
      "class 2: acc 0.9230769230769231, correct 12/13\n",
      "Validation loss decreased (inf --> 0.929215).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.2534, instance_loss: 0.0738, weighted_loss: 0.1995, label: 2, bag_size: 13963\n",
      "batch 39, loss: 1.1191, instance_loss: 0.0304, weighted_loss: 0.7925, label: 0, bag_size: 79156\n",
      "batch 59, loss: 1.2140, instance_loss: 0.0060, weighted_loss: 0.8516, label: 1, bag_size: 99153\n",
      "batch 79, loss: 0.0226, instance_loss: 0.0018, weighted_loss: 0.0164, label: 1, bag_size: 64851\n",
      "batch 99, loss: 1.1525, instance_loss: 0.0090, weighted_loss: 0.8094, label: 0, bag_size: 63775\n",
      "batch 119, loss: 0.5859, instance_loss: 0.0686, weighted_loss: 0.4307, label: 2, bag_size: 76843\n",
      "batch 139, loss: 0.1468, instance_loss: 0.0271, weighted_loss: 0.1109, label: 2, bag_size: 16282\n",
      "batch 159, loss: 0.2955, instance_loss: 0.0147, weighted_loss: 0.2112, label: 1, bag_size: 31015\n",
      "batch 179, loss: 0.0008, instance_loss: 0.0174, weighted_loss: 0.0058, label: 1, bag_size: 27124\n",
      "batch 199, loss: 0.0235, instance_loss: 0.0132, weighted_loss: 0.0204, label: 0, bag_size: 6579\n",
      "batch 219, loss: 0.0089, instance_loss: 0.0060, weighted_loss: 0.0081, label: 0, bag_size: 58648\n",
      "batch 239, loss: 0.1511, instance_loss: 0.0060, weighted_loss: 0.1076, label: 2, bag_size: 51554\n",
      "batch 259, loss: 0.0101, instance_loss: 0.0203, weighted_loss: 0.0131, label: 1, bag_size: 53211\n",
      "batch 279, loss: 0.1383, instance_loss: 0.0207, weighted_loss: 0.1031, label: 0, bag_size: 13926\n",
      "batch 299, loss: 0.0500, instance_loss: 0.0042, weighted_loss: 0.0362, label: 0, bag_size: 51603\n",
      "batch 319, loss: 0.0622, instance_loss: 0.0303, weighted_loss: 0.0526, label: 1, bag_size: 44760\n",
      "batch 339, loss: 0.5517, instance_loss: 0.1869, weighted_loss: 0.4422, label: 2, bag_size: 52584\n",
      "batch 359, loss: 0.5163, instance_loss: 0.0045, weighted_loss: 0.3627, label: 0, bag_size: 112851\n",
      "batch 379, loss: 3.9688, instance_loss: 0.0191, weighted_loss: 2.7839, label: 0, bag_size: 13539\n",
      "batch 399, loss: 0.4571, instance_loss: 0.0011, weighted_loss: 0.3203, label: 1, bag_size: 55668\n",
      "batch 419, loss: 0.1352, instance_loss: 0.0264, weighted_loss: 0.1025, label: 2, bag_size: 47138\n",
      "batch 439, loss: 0.0151, instance_loss: 0.0138, weighted_loss: 0.0148, label: 0, bag_size: 84361\n",
      "batch 459, loss: 0.1106, instance_loss: 0.0027, weighted_loss: 0.0782, label: 2, bag_size: 69002\n",
      "batch 479, loss: 0.2428, instance_loss: 0.0059, weighted_loss: 0.1718, label: 1, bag_size: 46101\n",
      "batch 499, loss: 1.4795, instance_loss: 0.0054, weighted_loss: 1.0373, label: 0, bag_size: 74863\n",
      "batch 519, loss: 0.0168, instance_loss: 0.0203, weighted_loss: 0.0179, label: 0, bag_size: 23506\n",
      "batch 539, loss: 0.0156, instance_loss: 0.0034, weighted_loss: 0.0120, label: 0, bag_size: 86995\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9965784671532847: correct 4369/4384\n",
      "class 1 clustering acc 0.9988594890510949: correct 4379/4384\n",
      "Epoch: 1, train_loss: 0.3800, train_clustering_loss:  0.0277, train_error: 0.1369\n",
      "class 0: acc 0.8284023668639053, correct 140/169\n",
      "class 1: acc 0.8473684210526315, correct 161/190\n",
      "class 2: acc 0.91005291005291, correct 172/189\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3603, val_error: 0.1148, auc: 0.9664\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8846153846153846, correct 23/26\n",
      "class 1: acc 0.8636363636363636, correct 19/22\n",
      "class 2: acc 0.9230769230769231, correct 12/13\n",
      "Validation loss decreased (0.929215 --> 0.966403).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.6683, instance_loss: 0.1780, weighted_loss: 0.5212, label: 2, bag_size: 11415\n",
      "batch 39, loss: 0.0296, instance_loss: 0.0002, weighted_loss: 0.0208, label: 2, bag_size: 103823\n",
      "batch 59, loss: 0.0228, instance_loss: 0.0008, weighted_loss: 0.0162, label: 0, bag_size: 65381\n",
      "batch 79, loss: 0.0336, instance_loss: 0.0076, weighted_loss: 0.0258, label: 0, bag_size: 93767\n",
      "batch 99, loss: 0.0997, instance_loss: 0.0136, weighted_loss: 0.0738, label: 2, bag_size: 59224\n",
      "batch 119, loss: 0.1436, instance_loss: 0.0124, weighted_loss: 0.1042, label: 2, bag_size: 19057\n",
      "batch 139, loss: 0.1009, instance_loss: 0.0007, weighted_loss: 0.0709, label: 0, bag_size: 60909\n",
      "batch 159, loss: 0.3475, instance_loss: 0.0092, weighted_loss: 0.2460, label: 2, bag_size: 65317\n",
      "batch 179, loss: 0.0882, instance_loss: 0.0100, weighted_loss: 0.0647, label: 1, bag_size: 28349\n",
      "batch 199, loss: 0.0264, instance_loss: 0.0018, weighted_loss: 0.0190, label: 2, bag_size: 68187\n",
      "batch 219, loss: 0.4414, instance_loss: 0.0160, weighted_loss: 0.3138, label: 2, bag_size: 87135\n",
      "batch 239, loss: 0.0004, instance_loss: 0.0013, weighted_loss: 0.0006, label: 0, bag_size: 45490\n",
      "batch 259, loss: 1.1639, instance_loss: 0.0115, weighted_loss: 0.8182, label: 0, bag_size: 16184\n",
      "batch 279, loss: 0.0019, instance_loss: 0.0159, weighted_loss: 0.0061, label: 1, bag_size: 16753\n",
      "batch 299, loss: 1.4911, instance_loss: 0.0077, weighted_loss: 1.0461, label: 1, bag_size: 20115\n",
      "batch 319, loss: 0.0457, instance_loss: 0.0035, weighted_loss: 0.0331, label: 2, bag_size: 38471\n",
      "batch 339, loss: 0.0038, instance_loss: 0.0028, weighted_loss: 0.0035, label: 0, bag_size: 59882\n",
      "batch 359, loss: 0.0328, instance_loss: 0.0042, weighted_loss: 0.0242, label: 2, bag_size: 66023\n",
      "batch 379, loss: 0.0162, instance_loss: 0.0260, weighted_loss: 0.0192, label: 0, bag_size: 12044\n",
      "batch 399, loss: 0.6429, instance_loss: 0.0108, weighted_loss: 0.4533, label: 0, bag_size: 52270\n",
      "batch 419, loss: 0.0501, instance_loss: 0.0013, weighted_loss: 0.0355, label: 0, bag_size: 36141\n",
      "batch 439, loss: 0.0445, instance_loss: 0.0169, weighted_loss: 0.0362, label: 0, bag_size: 30263\n",
      "batch 459, loss: 0.0142, instance_loss: 0.0056, weighted_loss: 0.0116, label: 2, bag_size: 63921\n",
      "batch 479, loss: 0.0005, instance_loss: 0.0019, weighted_loss: 0.0010, label: 0, bag_size: 18929\n",
      "batch 499, loss: 0.0125, instance_loss: 0.0498, weighted_loss: 0.0237, label: 2, bag_size: 66345\n",
      "batch 519, loss: 0.2669, instance_loss: 0.0016, weighted_loss: 0.1873, label: 2, bag_size: 91267\n",
      "batch 539, loss: 1.3533, instance_loss: 0.0021, weighted_loss: 0.9480, label: 0, bag_size: 76412\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "class 1 clustering acc 0.9986313868613139: correct 4378/4384\n",
      "Epoch: 2, train_loss: 0.3204, train_clustering_loss:  0.0180, train_error: 0.1095\n",
      "class 0: acc 0.8602150537634409, correct 160/186\n",
      "class 1: acc 0.874251497005988, correct 146/167\n",
      "class 2: acc 0.9333333333333333, correct 182/195\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4508, val_error: 0.1148, auc: 0.9653\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9230769230769231, correct 24/26\n",
      "class 1: acc 0.7727272727272727, correct 17/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0223, instance_loss: 0.0484, weighted_loss: 0.0301, label: 2, bag_size: 33828\n",
      "batch 39, loss: 0.1862, instance_loss: 0.0073, weighted_loss: 0.1325, label: 0, bag_size: 63845\n",
      "batch 59, loss: 0.0008, instance_loss: 0.0008, weighted_loss: 0.0008, label: 0, bag_size: 75790\n",
      "batch 79, loss: 0.3844, instance_loss: 0.0029, weighted_loss: 0.2700, label: 2, bag_size: 63921\n",
      "batch 99, loss: 2.3426, instance_loss: 0.0705, weighted_loss: 1.6610, label: 0, bag_size: 9339\n",
      "batch 119, loss: 0.2096, instance_loss: 0.0527, weighted_loss: 0.1625, label: 0, bag_size: 60709\n",
      "batch 139, loss: 0.1173, instance_loss: 0.0252, weighted_loss: 0.0897, label: 1, bag_size: 39620\n",
      "batch 159, loss: 0.6213, instance_loss: 0.0135, weighted_loss: 0.4390, label: 1, bag_size: 22123\n",
      "batch 179, loss: 0.1135, instance_loss: 0.0038, weighted_loss: 0.0806, label: 0, bag_size: 29966\n",
      "batch 199, loss: 0.0268, instance_loss: 0.0251, weighted_loss: 0.0263, label: 2, bag_size: 38471\n",
      "batch 219, loss: 0.1153, instance_loss: 0.0013, weighted_loss: 0.0811, label: 0, bag_size: 94524\n",
      "batch 239, loss: 0.1217, instance_loss: 0.0006, weighted_loss: 0.0854, label: 0, bag_size: 70854\n",
      "batch 259, loss: 0.0148, instance_loss: 0.0017, weighted_loss: 0.0109, label: 2, bag_size: 68187\n",
      "batch 279, loss: 0.1032, instance_loss: 0.0027, weighted_loss: 0.0730, label: 1, bag_size: 42997\n",
      "batch 299, loss: 0.0736, instance_loss: 0.0063, weighted_loss: 0.0534, label: 2, bag_size: 55626\n",
      "batch 319, loss: 0.0071, instance_loss: 0.1431, weighted_loss: 0.0479, label: 2, bag_size: 30615\n",
      "batch 339, loss: 0.0021, instance_loss: 0.0007, weighted_loss: 0.0017, label: 1, bag_size: 100810\n",
      "batch 359, loss: 0.0340, instance_loss: 0.0076, weighted_loss: 0.0261, label: 1, bag_size: 86381\n",
      "batch 379, loss: 0.0735, instance_loss: 0.0481, weighted_loss: 0.0659, label: 0, bag_size: 49951\n",
      "batch 399, loss: 0.0627, instance_loss: 0.0016, weighted_loss: 0.0444, label: 2, bag_size: 71763\n",
      "batch 419, loss: 0.0198, instance_loss: 0.0002, weighted_loss: 0.0139, label: 0, bag_size: 92595\n",
      "batch 439, loss: 1.9961, instance_loss: 0.0009, weighted_loss: 1.3975, label: 2, bag_size: 23841\n",
      "batch 459, loss: 0.0041, instance_loss: 0.0008, weighted_loss: 0.0031, label: 0, bag_size: 53568\n",
      "batch 479, loss: 0.3052, instance_loss: 0.0027, weighted_loss: 0.2145, label: 1, bag_size: 23981\n",
      "batch 499, loss: 0.0015, instance_loss: 0.0016, weighted_loss: 0.0015, label: 0, bag_size: 114059\n",
      "batch 519, loss: 0.0149, instance_loss: 0.0004, weighted_loss: 0.0106, label: 0, bag_size: 95055\n",
      "batch 539, loss: 0.8998, instance_loss: 0.0000, weighted_loss: 0.6299, label: 1, bag_size: 45064\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999315693430657: correct 4381/4384\n",
      "class 1 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "Epoch: 3, train_loss: 0.2755, train_clustering_loss:  0.0151, train_error: 0.0912\n",
      "class 0: acc 0.9047619047619048, correct 190/210\n",
      "class 1: acc 0.8511904761904762, correct 143/168\n",
      "class 2: acc 0.9705882352941176, correct 165/170\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3569, val_error: 0.0984, auc: 0.9669\n",
      "class 0 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8461538461538461, correct 22/26\n",
      "class 1: acc 0.9545454545454546, correct 21/22\n",
      "class 2: acc 0.9230769230769231, correct 12/13\n",
      "Validation loss decreased (0.966403 --> 0.966907).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0067, instance_loss: 0.0011, weighted_loss: 0.0050, label: 2, bag_size: 47603\n",
      "batch 39, loss: 0.0676, instance_loss: 0.0013, weighted_loss: 0.0477, label: 2, bag_size: 40315\n",
      "batch 59, loss: 0.0178, instance_loss: 0.0008, weighted_loss: 0.0127, label: 1, bag_size: 113186\n",
      "batch 79, loss: 0.3424, instance_loss: 0.0131, weighted_loss: 0.2436, label: 0, bag_size: 14325\n",
      "batch 99, loss: 0.6085, instance_loss: 0.0009, weighted_loss: 0.4262, label: 0, bag_size: 21504\n",
      "batch 119, loss: 0.0125, instance_loss: 0.0216, weighted_loss: 0.0152, label: 1, bag_size: 4094\n",
      "batch 139, loss: 0.0021, instance_loss: 0.0018, weighted_loss: 0.0020, label: 1, bag_size: 66357\n",
      "batch 159, loss: 0.0040, instance_loss: 0.0007, weighted_loss: 0.0030, label: 1, bag_size: 13964\n",
      "batch 179, loss: 0.0067, instance_loss: 0.0050, weighted_loss: 0.0062, label: 2, bag_size: 38471\n",
      "batch 199, loss: 0.0224, instance_loss: 0.0232, weighted_loss: 0.0226, label: 2, bag_size: 1329\n",
      "batch 219, loss: 0.0274, instance_loss: 0.0004, weighted_loss: 0.0193, label: 1, bag_size: 42941\n",
      "batch 239, loss: 0.0200, instance_loss: 0.0084, weighted_loss: 0.0165, label: 2, bag_size: 38471\n",
      "batch 259, loss: 0.0035, instance_loss: 0.0002, weighted_loss: 0.0025, label: 1, bag_size: 66357\n",
      "batch 279, loss: 0.2980, instance_loss: 0.0194, weighted_loss: 0.2144, label: 2, bag_size: 23841\n",
      "batch 299, loss: 0.0049, instance_loss: 0.0025, weighted_loss: 0.0042, label: 0, bag_size: 23506\n",
      "batch 319, loss: 0.0146, instance_loss: 0.0006, weighted_loss: 0.0104, label: 2, bag_size: 103823\n",
      "batch 339, loss: 0.0022, instance_loss: 0.0021, weighted_loss: 0.0022, label: 1, bag_size: 74826\n",
      "batch 359, loss: 0.0027, instance_loss: 0.0387, weighted_loss: 0.0135, label: 1, bag_size: 44760\n",
      "batch 379, loss: 0.0173, instance_loss: 0.0038, weighted_loss: 0.0132, label: 2, bag_size: 49458\n",
      "batch 399, loss: 0.0069, instance_loss: 0.0139, weighted_loss: 0.0090, label: 1, bag_size: 12138\n",
      "batch 419, loss: 0.0151, instance_loss: 0.0139, weighted_loss: 0.0148, label: 1, bag_size: 91540\n",
      "batch 439, loss: 0.0260, instance_loss: 0.0060, weighted_loss: 0.0200, label: 0, bag_size: 7780\n",
      "batch 459, loss: 0.0055, instance_loss: 0.0095, weighted_loss: 0.0067, label: 2, bag_size: 48593\n",
      "batch 479, loss: 0.2140, instance_loss: 0.0043, weighted_loss: 0.1511, label: 2, bag_size: 64675\n",
      "batch 499, loss: 0.0149, instance_loss: 0.0019, weighted_loss: 0.0110, label: 1, bag_size: 113186\n",
      "batch 519, loss: 0.3831, instance_loss: 0.0076, weighted_loss: 0.2704, label: 0, bag_size: 49951\n",
      "batch 539, loss: 0.0023, instance_loss: 0.0004, weighted_loss: 0.0017, label: 2, bag_size: 47603\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999771897810219: correct 4383/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 4, train_loss: 0.2208, train_clustering_loss:  0.0108, train_error: 0.0712\n",
      "class 0: acc 0.9, correct 153/170\n",
      "class 1: acc 0.9152542372881356, correct 162/177\n",
      "class 2: acc 0.9651741293532339, correct 194/201\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3425, val_error: 0.0984, auc: 0.9712\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.9230769230769231, correct 24/26\n",
      "class 1: acc 0.8181818181818182, correct 18/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "Validation loss decreased (0.966907 --> 0.971233).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0005, instance_loss: 0.0087, weighted_loss: 0.0030, label: 1, bag_size: 64851\n",
      "batch 39, loss: 0.0024, instance_loss: 0.0062, weighted_loss: 0.0036, label: 0, bag_size: 47840\n",
      "batch 59, loss: 0.5308, instance_loss: 0.0079, weighted_loss: 0.3739, label: 2, bag_size: 23841\n",
      "batch 79, loss: 0.0144, instance_loss: 0.0005, weighted_loss: 0.0102, label: 1, bag_size: 84060\n",
      "batch 99, loss: 0.1840, instance_loss: 0.0048, weighted_loss: 0.1302, label: 1, bag_size: 66144\n",
      "batch 119, loss: 0.4041, instance_loss: 0.0035, weighted_loss: 0.2839, label: 1, bag_size: 12019\n",
      "batch 139, loss: 0.0309, instance_loss: 0.0040, weighted_loss: 0.0228, label: 2, bag_size: 33828\n",
      "batch 159, loss: 0.0261, instance_loss: 0.0017, weighted_loss: 0.0188, label: 0, bag_size: 59285\n",
      "batch 179, loss: 0.4137, instance_loss: 0.0040, weighted_loss: 0.2908, label: 1, bag_size: 51926\n",
      "batch 199, loss: 0.0048, instance_loss: 0.0026, weighted_loss: 0.0041, label: 0, bag_size: 65592\n",
      "batch 219, loss: 0.1009, instance_loss: 0.0109, weighted_loss: 0.0739, label: 1, bag_size: 8400\n",
      "batch 239, loss: 0.9393, instance_loss: 0.0000, weighted_loss: 0.6575, label: 2, bag_size: 103823\n",
      "batch 259, loss: 0.0438, instance_loss: 0.0470, weighted_loss: 0.0447, label: 1, bag_size: 5928\n",
      "batch 279, loss: 0.0185, instance_loss: 0.0009, weighted_loss: 0.0132, label: 0, bag_size: 79139\n",
      "batch 299, loss: 0.0043, instance_loss: 0.0052, weighted_loss: 0.0046, label: 1, bag_size: 74927\n",
      "batch 319, loss: 0.0719, instance_loss: 0.0109, weighted_loss: 0.0536, label: 2, bag_size: 34408\n",
      "batch 339, loss: 0.1019, instance_loss: 0.0066, weighted_loss: 0.0733, label: 2, bag_size: 10243\n",
      "batch 359, loss: 0.0033, instance_loss: 0.0036, weighted_loss: 0.0034, label: 0, bag_size: 53706\n",
      "batch 379, loss: 0.9926, instance_loss: 0.0274, weighted_loss: 0.7030, label: 2, bag_size: 52584\n",
      "batch 399, loss: 0.0037, instance_loss: 0.0040, weighted_loss: 0.0037, label: 2, bag_size: 68187\n",
      "batch 419, loss: 0.8426, instance_loss: 0.0016, weighted_loss: 0.5903, label: 1, bag_size: 87674\n",
      "batch 439, loss: 0.0390, instance_loss: 0.0011, weighted_loss: 0.0276, label: 1, bag_size: 54312\n",
      "batch 459, loss: 0.0118, instance_loss: 0.0019, weighted_loss: 0.0089, label: 2, bag_size: 65340\n",
      "batch 479, loss: 0.0040, instance_loss: 0.0028, weighted_loss: 0.0036, label: 1, bag_size: 8400\n",
      "batch 499, loss: 0.0311, instance_loss: 0.0136, weighted_loss: 0.0259, label: 0, bag_size: 15052\n",
      "batch 519, loss: 0.0110, instance_loss: 0.0177, weighted_loss: 0.0130, label: 2, bag_size: 71763\n",
      "batch 539, loss: 0.0088, instance_loss: 0.0047, weighted_loss: 0.0076, label: 2, bag_size: 68187\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999771897810219: correct 4383/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 5, train_loss: 0.2219, train_clustering_loss:  0.0103, train_error: 0.0821\n",
      "class 0: acc 0.8830409356725146, correct 151/171\n",
      "class 1: acc 0.9044943820224719, correct 161/178\n",
      "class 2: acc 0.9597989949748744, correct 191/199\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4397, val_error: 0.1311, auc: 0.9689\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8076923076923077, correct 21/26\n",
      "class 1: acc 0.9545454545454546, correct 21/22\n",
      "class 2: acc 0.8461538461538461, correct 11/13\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1369, instance_loss: 0.0000, weighted_loss: 0.0959, label: 0, bag_size: 58702\n",
      "batch 39, loss: 0.4426, instance_loss: 0.0004, weighted_loss: 0.3100, label: 1, bag_size: 42997\n",
      "batch 59, loss: 0.0508, instance_loss: 0.0052, weighted_loss: 0.0371, label: 0, bag_size: 59830\n",
      "batch 79, loss: 0.0040, instance_loss: 0.0039, weighted_loss: 0.0040, label: 2, bag_size: 95428\n",
      "batch 99, loss: 0.1097, instance_loss: 0.0000, weighted_loss: 0.0768, label: 0, bag_size: 51591\n",
      "batch 119, loss: 0.3319, instance_loss: 0.0124, weighted_loss: 0.2360, label: 2, bag_size: 65317\n",
      "batch 139, loss: 0.0001, instance_loss: 0.0002, weighted_loss: 0.0002, label: 0, bag_size: 29980\n",
      "batch 159, loss: 0.0185, instance_loss: 0.0061, weighted_loss: 0.0148, label: 0, bag_size: 78549\n",
      "batch 179, loss: 0.0003, instance_loss: 0.0000, weighted_loss: 0.0002, label: 0, bag_size: 70278\n",
      "batch 199, loss: 0.0014, instance_loss: 0.0002, weighted_loss: 0.0010, label: 0, bag_size: 92502\n",
      "batch 219, loss: 0.0109, instance_loss: 0.0049, weighted_loss: 0.0091, label: 1, bag_size: 17958\n",
      "batch 239, loss: 1.5776, instance_loss: 0.0000, weighted_loss: 1.1043, label: 1, bag_size: 112356\n",
      "batch 259, loss: 0.0440, instance_loss: 0.0003, weighted_loss: 0.0309, label: 0, bag_size: 94524\n",
      "batch 279, loss: 0.3842, instance_loss: 0.0000, weighted_loss: 0.2690, label: 1, bag_size: 114116\n",
      "batch 299, loss: 0.1984, instance_loss: 0.0043, weighted_loss: 0.1402, label: 1, bag_size: 50909\n",
      "batch 319, loss: 0.0024, instance_loss: 0.0000, weighted_loss: 0.0017, label: 0, bag_size: 39692\n",
      "batch 339, loss: 0.0041, instance_loss: 0.0006, weighted_loss: 0.0030, label: 0, bag_size: 23753\n",
      "batch 359, loss: 0.3337, instance_loss: 0.0014, weighted_loss: 0.2340, label: 1, bag_size: 65195\n",
      "batch 379, loss: 0.7747, instance_loss: 0.0048, weighted_loss: 0.5438, label: 2, bag_size: 76843\n",
      "batch 399, loss: 0.0307, instance_loss: 0.0031, weighted_loss: 0.0224, label: 2, bag_size: 66801\n",
      "batch 419, loss: 0.0641, instance_loss: 0.0014, weighted_loss: 0.0453, label: 2, bag_size: 50978\n",
      "batch 439, loss: 1.1261, instance_loss: 0.0023, weighted_loss: 0.7890, label: 0, bag_size: 100992\n",
      "batch 459, loss: 0.1751, instance_loss: 0.0021, weighted_loss: 0.1232, label: 2, bag_size: 63921\n",
      "batch 479, loss: 0.0833, instance_loss: 0.0033, weighted_loss: 0.0593, label: 1, bag_size: 6649\n",
      "batch 499, loss: 1.4726, instance_loss: 0.0043, weighted_loss: 1.0321, label: 0, bag_size: 41723\n",
      "batch 519, loss: 0.0038, instance_loss: 0.0004, weighted_loss: 0.0028, label: 0, bag_size: 53040\n",
      "batch 539, loss: 0.0086, instance_loss: 0.0029, weighted_loss: 0.0069, label: 0, bag_size: 108807\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 6, train_loss: 0.2334, train_clustering_loss:  0.0100, train_error: 0.0821\n",
      "class 0: acc 0.8980582524271845, correct 185/206\n",
      "class 1: acc 0.89171974522293, correct 140/157\n",
      "class 2: acc 0.9621621621621622, correct 178/185\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3147, val_error: 0.0820, auc: 0.9725\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9615384615384616, correct 25/26\n",
      "class 1: acc 0.8181818181818182, correct 18/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "Validation loss decreased (0.971233 --> 0.972509).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.0937, instance_loss: 0.0010, weighted_loss: 0.7659, label: 1, bag_size: 96719\n",
      "batch 39, loss: 0.0052, instance_loss: 0.0003, weighted_loss: 0.0037, label: 0, bag_size: 78425\n",
      "batch 59, loss: 0.0299, instance_loss: 0.0008, weighted_loss: 0.0211, label: 0, bag_size: 20131\n",
      "batch 79, loss: 0.3422, instance_loss: 0.0000, weighted_loss: 0.2395, label: 1, bag_size: 109684\n",
      "batch 99, loss: 0.0180, instance_loss: 0.0037, weighted_loss: 0.0137, label: 2, bag_size: 36673\n",
      "batch 119, loss: 0.0719, instance_loss: 0.0021, weighted_loss: 0.0509, label: 2, bag_size: 50978\n",
      "batch 139, loss: 0.0013, instance_loss: 0.0010, weighted_loss: 0.0012, label: 0, bag_size: 12884\n",
      "batch 159, loss: 0.0037, instance_loss: 0.0051, weighted_loss: 0.0041, label: 0, bag_size: 25002\n",
      "batch 179, loss: 0.0033, instance_loss: 0.0036, weighted_loss: 0.0034, label: 2, bag_size: 46661\n",
      "batch 199, loss: 0.0426, instance_loss: 0.0127, weighted_loss: 0.0337, label: 2, bag_size: 13963\n",
      "batch 219, loss: 0.0311, instance_loss: 0.0077, weighted_loss: 0.0240, label: 0, bag_size: 20672\n",
      "batch 239, loss: 0.0009, instance_loss: 0.0008, weighted_loss: 0.0009, label: 1, bag_size: 50525\n",
      "batch 259, loss: 0.0015, instance_loss: 0.0292, weighted_loss: 0.0098, label: 1, bag_size: 44760\n",
      "batch 279, loss: 0.0095, instance_loss: 0.0011, weighted_loss: 0.0070, label: 2, bag_size: 73189\n",
      "batch 299, loss: 0.1264, instance_loss: 0.0094, weighted_loss: 0.0913, label: 2, bag_size: 23841\n",
      "batch 319, loss: 0.0407, instance_loss: 0.0024, weighted_loss: 0.0292, label: 1, bag_size: 66144\n",
      "batch 339, loss: 0.0277, instance_loss: 0.0002, weighted_loss: 0.0194, label: 0, bag_size: 78986\n",
      "batch 359, loss: 0.4910, instance_loss: 0.0030, weighted_loss: 0.3446, label: 2, bag_size: 23841\n",
      "batch 379, loss: 0.1664, instance_loss: 0.0045, weighted_loss: 0.1178, label: 1, bag_size: 41704\n",
      "batch 399, loss: 0.0768, instance_loss: 0.0175, weighted_loss: 0.0590, label: 1, bag_size: 54398\n",
      "batch 419, loss: 0.0023, instance_loss: 0.0109, weighted_loss: 0.0049, label: 1, bag_size: 10520\n",
      "batch 439, loss: 0.0013, instance_loss: 0.0033, weighted_loss: 0.0019, label: 2, bag_size: 47603\n",
      "batch 459, loss: 0.8130, instance_loss: 0.0002, weighted_loss: 0.5691, label: 1, bag_size: 49548\n",
      "batch 479, loss: 0.0128, instance_loss: 0.0076, weighted_loss: 0.0112, label: 1, bag_size: 20374\n",
      "batch 499, loss: 0.5945, instance_loss: 0.0276, weighted_loss: 0.4244, label: 2, bag_size: 65317\n",
      "batch 519, loss: 1.9787, instance_loss: 0.0194, weighted_loss: 1.3909, label: 0, bag_size: 34677\n",
      "batch 539, loss: 0.0075, instance_loss: 0.0074, weighted_loss: 0.0075, label: 0, bag_size: 80173\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999771897810219: correct 4383/4384\n",
      "class 1 clustering acc 0.9988594890510949: correct 4379/4384\n",
      "Epoch: 7, train_loss: 0.1868, train_clustering_loss:  0.0093, train_error: 0.0620\n",
      "class 0: acc 0.9195402298850575, correct 160/174\n",
      "class 1: acc 0.9184782608695652, correct 169/184\n",
      "class 2: acc 0.9736842105263158, correct 185/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3970, val_error: 0.1639, auc: 0.9607\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.7692307692307693, correct 20/26\n",
      "class 1: acc 0.8181818181818182, correct 18/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0055, instance_loss: 0.0000, weighted_loss: 0.0038, label: 0, bag_size: 75790\n",
      "batch 39, loss: 0.0548, instance_loss: 0.0030, weighted_loss: 0.0393, label: 2, bag_size: 52584\n",
      "batch 59, loss: 0.0008, instance_loss: 0.0107, weighted_loss: 0.0038, label: 2, bag_size: 66023\n",
      "batch 79, loss: 0.8104, instance_loss: 0.0046, weighted_loss: 0.5687, label: 1, bag_size: 42983\n",
      "batch 99, loss: 0.0123, instance_loss: 0.0210, weighted_loss: 0.0149, label: 1, bag_size: 8400\n",
      "batch 119, loss: 0.0169, instance_loss: 0.0219, weighted_loss: 0.0184, label: 2, bag_size: 48593\n",
      "batch 139, loss: 1.4247, instance_loss: 0.0412, weighted_loss: 1.0096, label: 1, bag_size: 5928\n",
      "batch 159, loss: 0.0636, instance_loss: 0.0036, weighted_loss: 0.0456, label: 0, bag_size: 43244\n",
      "batch 179, loss: 0.0027, instance_loss: 0.0028, weighted_loss: 0.0027, label: 0, bag_size: 23753\n",
      "batch 199, loss: 0.7068, instance_loss: 0.0019, weighted_loss: 0.4953, label: 0, bag_size: 74863\n",
      "batch 219, loss: 0.0168, instance_loss: 0.0051, weighted_loss: 0.0133, label: 2, bag_size: 72966\n",
      "batch 239, loss: 0.0924, instance_loss: 0.0073, weighted_loss: 0.0668, label: 1, bag_size: 62303\n",
      "batch 259, loss: 0.1004, instance_loss: 0.0007, weighted_loss: 0.0705, label: 0, bag_size: 87503\n",
      "batch 279, loss: 0.0404, instance_loss: 0.0007, weighted_loss: 0.0285, label: 2, bag_size: 36673\n",
      "batch 299, loss: 0.3845, instance_loss: 0.0095, weighted_loss: 0.2720, label: 1, bag_size: 51926\n",
      "batch 319, loss: 0.0005, instance_loss: 0.0028, weighted_loss: 0.0012, label: 0, bag_size: 114059\n",
      "batch 339, loss: 0.1574, instance_loss: 0.0000, weighted_loss: 0.1102, label: 1, bag_size: 49548\n",
      "batch 359, loss: 0.0430, instance_loss: 0.0043, weighted_loss: 0.0314, label: 2, bag_size: 50246\n",
      "batch 379, loss: 0.4229, instance_loss: 0.0189, weighted_loss: 0.3017, label: 0, bag_size: 13539\n",
      "batch 399, loss: 0.0327, instance_loss: 0.0099, weighted_loss: 0.0259, label: 1, bag_size: 51221\n",
      "batch 419, loss: 0.0093, instance_loss: 0.0013, weighted_loss: 0.0069, label: 2, bag_size: 95428\n",
      "batch 439, loss: 0.0378, instance_loss: 0.0046, weighted_loss: 0.0279, label: 2, bag_size: 52584\n",
      "batch 459, loss: 0.0106, instance_loss: 0.0020, weighted_loss: 0.0080, label: 0, bag_size: 21648\n",
      "batch 479, loss: 0.2686, instance_loss: 0.0028, weighted_loss: 0.1889, label: 1, bag_size: 35236\n",
      "batch 499, loss: 0.0270, instance_loss: 0.0000, weighted_loss: 0.0189, label: 0, bag_size: 21648\n",
      "batch 519, loss: 0.0097, instance_loss: 0.0000, weighted_loss: 0.0068, label: 2, bag_size: 103823\n",
      "batch 539, loss: 0.0008, instance_loss: 0.0108, weighted_loss: 0.0038, label: 0, bag_size: 13674\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9986313868613139: correct 4378/4384\n",
      "Epoch: 8, train_loss: 0.1731, train_clustering_loss:  0.0110, train_error: 0.0566\n",
      "class 0: acc 0.9162011173184358, correct 164/179\n",
      "class 1: acc 0.9289617486338798, correct 170/183\n",
      "class 2: acc 0.9838709677419355, correct 183/186\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3113, val_error: 0.0820, auc: 0.9730\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9230769230769231, correct 24/26\n",
      "class 1: acc 0.8636363636363636, correct 19/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "Validation loss decreased (0.972509 --> 0.972999).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0341, instance_loss: 0.0003, weighted_loss: 0.0239, label: 0, bag_size: 68879\n",
      "batch 39, loss: 0.0114, instance_loss: 0.0006, weighted_loss: 0.0082, label: 2, bag_size: 72015\n",
      "batch 59, loss: 0.0101, instance_loss: 0.0000, weighted_loss: 0.0071, label: 0, bag_size: 51591\n",
      "batch 79, loss: 0.0001, instance_loss: 0.0004, weighted_loss: 0.0002, label: 0, bag_size: 9374\n",
      "batch 99, loss: 0.0163, instance_loss: 0.0003, weighted_loss: 0.0115, label: 2, bag_size: 38471\n",
      "batch 119, loss: 0.0051, instance_loss: 0.0041, weighted_loss: 0.0048, label: 1, bag_size: 49597\n",
      "batch 139, loss: 0.8061, instance_loss: 0.0222, weighted_loss: 0.5709, label: 1, bag_size: 5928\n",
      "batch 159, loss: 0.0121, instance_loss: 0.0021, weighted_loss: 0.0091, label: 0, bag_size: 14481\n",
      "batch 179, loss: 0.0022, instance_loss: 0.0064, weighted_loss: 0.0035, label: 1, bag_size: 13709\n",
      "batch 199, loss: 0.0059, instance_loss: 0.0068, weighted_loss: 0.0062, label: 1, bag_size: 27666\n",
      "batch 219, loss: 0.0464, instance_loss: 0.0004, weighted_loss: 0.0326, label: 0, bag_size: 82955\n",
      "batch 239, loss: 0.0033, instance_loss: 0.0007, weighted_loss: 0.0025, label: 0, bag_size: 41071\n",
      "batch 259, loss: 0.0026, instance_loss: 0.0199, weighted_loss: 0.0078, label: 1, bag_size: 2328\n",
      "batch 279, loss: 0.0155, instance_loss: 0.0098, weighted_loss: 0.0138, label: 1, bag_size: 18525\n",
      "batch 299, loss: 0.5595, instance_loss: 0.0153, weighted_loss: 0.3962, label: 0, bag_size: 13539\n",
      "batch 319, loss: 0.0061, instance_loss: 0.0159, weighted_loss: 0.0090, label: 1, bag_size: 12022\n",
      "batch 339, loss: 0.0001, instance_loss: 0.0039, weighted_loss: 0.0012, label: 1, bag_size: 28713\n",
      "batch 359, loss: 0.4705, instance_loss: 0.0045, weighted_loss: 0.3307, label: 1, bag_size: 10151\n",
      "batch 379, loss: 0.0261, instance_loss: 0.0084, weighted_loss: 0.0207, label: 1, bag_size: 39620\n",
      "batch 399, loss: 0.0006, instance_loss: 0.0026, weighted_loss: 0.0012, label: 2, bag_size: 72966\n",
      "batch 419, loss: 0.2911, instance_loss: 0.0002, weighted_loss: 0.2038, label: 0, bag_size: 56229\n",
      "batch 439, loss: 0.0001, instance_loss: 0.0012, weighted_loss: 0.0004, label: 0, bag_size: 52973\n",
      "batch 459, loss: 0.0005, instance_loss: 0.0056, weighted_loss: 0.0020, label: 2, bag_size: 68187\n",
      "batch 479, loss: 0.1046, instance_loss: 0.0050, weighted_loss: 0.0747, label: 0, bag_size: 29966\n",
      "batch 499, loss: 0.3186, instance_loss: 0.0044, weighted_loss: 0.2244, label: 1, bag_size: 79573\n",
      "batch 519, loss: 0.0162, instance_loss: 0.0025, weighted_loss: 0.0121, label: 1, bag_size: 46625\n",
      "batch 539, loss: 0.8880, instance_loss: 0.0060, weighted_loss: 0.6234, label: 0, bag_size: 67027\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999315693430657: correct 4381/4384\n",
      "Epoch: 9, train_loss: 0.1581, train_clustering_loss:  0.0062, train_error: 0.0584\n",
      "class 0: acc 0.9120879120879121, correct 166/182\n",
      "class 1: acc 0.9421052631578948, correct 179/190\n",
      "class 2: acc 0.9715909090909091, correct 171/176\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3403, val_error: 0.0984, auc: 0.9652\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9615384615384616, correct 25/26\n",
      "class 1: acc 0.7727272727272727, correct 17/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0465, instance_loss: 0.0015, weighted_loss: 0.0330, label: 2, bag_size: 28252\n",
      "batch 39, loss: 0.2901, instance_loss: 0.0085, weighted_loss: 0.2056, label: 1, bag_size: 7532\n",
      "batch 59, loss: 0.0010, instance_loss: 0.0041, weighted_loss: 0.0019, label: 2, bag_size: 38471\n",
      "batch 79, loss: 0.0110, instance_loss: 0.0016, weighted_loss: 0.0082, label: 0, bag_size: 60909\n",
      "batch 99, loss: 0.0482, instance_loss: 0.0160, weighted_loss: 0.0385, label: 2, bag_size: 34408\n",
      "batch 119, loss: 0.0557, instance_loss: 0.0139, weighted_loss: 0.0432, label: 1, bag_size: 41704\n",
      "batch 139, loss: 0.1508, instance_loss: 0.0022, weighted_loss: 0.1062, label: 0, bag_size: 53043\n",
      "batch 159, loss: 0.1250, instance_loss: 0.0013, weighted_loss: 0.0879, label: 0, bag_size: 82955\n",
      "batch 179, loss: 0.0022, instance_loss: 0.0004, weighted_loss: 0.0016, label: 0, bag_size: 78425\n",
      "batch 199, loss: 0.0386, instance_loss: 0.0025, weighted_loss: 0.0277, label: 1, bag_size: 62303\n",
      "batch 219, loss: 0.0059, instance_loss: 0.0023, weighted_loss: 0.0048, label: 2, bag_size: 54040\n",
      "batch 239, loss: 0.2404, instance_loss: 0.0024, weighted_loss: 0.1690, label: 2, bag_size: 51554\n",
      "batch 259, loss: 0.5574, instance_loss: 0.0056, weighted_loss: 0.3919, label: 0, bag_size: 60709\n",
      "batch 279, loss: 2.7005, instance_loss: 0.0110, weighted_loss: 1.8937, label: 1, bag_size: 30632\n",
      "batch 299, loss: 0.0038, instance_loss: 0.0047, weighted_loss: 0.0041, label: 0, bag_size: 125590\n",
      "batch 319, loss: 0.0031, instance_loss: 0.0011, weighted_loss: 0.0025, label: 2, bag_size: 72966\n",
      "batch 339, loss: 0.0357, instance_loss: 0.0151, weighted_loss: 0.0296, label: 2, bag_size: 65317\n",
      "batch 359, loss: 0.0058, instance_loss: 0.0054, weighted_loss: 0.0057, label: 0, bag_size: 6579\n",
      "batch 379, loss: 0.0017, instance_loss: 0.0008, weighted_loss: 0.0014, label: 0, bag_size: 60909\n",
      "batch 399, loss: 0.0258, instance_loss: 0.0006, weighted_loss: 0.0183, label: 0, bag_size: 82846\n",
      "batch 419, loss: 0.0912, instance_loss: 0.0151, weighted_loss: 0.0684, label: 2, bag_size: 47138\n",
      "batch 439, loss: 0.0536, instance_loss: 0.0068, weighted_loss: 0.0395, label: 1, bag_size: 81168\n",
      "batch 459, loss: 0.0051, instance_loss: 0.0151, weighted_loss: 0.0081, label: 0, bag_size: 11462\n",
      "batch 479, loss: 0.4561, instance_loss: 0.0321, weighted_loss: 0.3289, label: 0, bag_size: 7318\n",
      "batch 499, loss: 0.1996, instance_loss: 0.1065, weighted_loss: 0.1717, label: 0, bag_size: 6646\n",
      "batch 519, loss: 0.0034, instance_loss: 0.0037, weighted_loss: 0.0035, label: 1, bag_size: 13964\n",
      "batch 539, loss: 0.0625, instance_loss: 0.0032, weighted_loss: 0.0447, label: 0, bag_size: 84811\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9979470802919708: correct 4375/4384\n",
      "Epoch: 10, train_loss: 0.2004, train_clustering_loss:  0.0099, train_error: 0.0785\n",
      "class 0: acc 0.8707865168539326, correct 155/178\n",
      "class 1: acc 0.9213483146067416, correct 164/178\n",
      "class 2: acc 0.96875, correct 186/192\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4823, val_error: 0.1639, auc: 0.9602\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.6923076923076923, correct 18/26\n",
      "class 1: acc 0.9090909090909091, correct 20/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0024, instance_loss: 0.0047, weighted_loss: 0.0031, label: 0, bag_size: 47840\n",
      "batch 39, loss: 0.0007, instance_loss: 0.0015, weighted_loss: 0.0010, label: 0, bag_size: 61984\n",
      "batch 59, loss: 0.0031, instance_loss: 0.0035, weighted_loss: 0.0032, label: 0, bag_size: 61703\n",
      "batch 79, loss: 0.0002, instance_loss: 0.0083, weighted_loss: 0.0026, label: 0, bag_size: 31339\n",
      "batch 99, loss: 0.0238, instance_loss: 0.0019, weighted_loss: 0.0172, label: 2, bag_size: 52584\n",
      "batch 119, loss: 0.0033, instance_loss: 0.0019, weighted_loss: 0.0029, label: 2, bag_size: 69002\n",
      "batch 139, loss: 0.0036, instance_loss: 0.0050, weighted_loss: 0.0040, label: 2, bag_size: 11415\n",
      "batch 159, loss: 0.0001, instance_loss: 0.0075, weighted_loss: 0.0023, label: 1, bag_size: 92385\n",
      "batch 179, loss: 0.0097, instance_loss: 0.0006, weighted_loss: 0.0069, label: 2, bag_size: 65340\n",
      "batch 199, loss: 0.0195, instance_loss: 0.0036, weighted_loss: 0.0147, label: 1, bag_size: 56004\n",
      "batch 219, loss: 0.0018, instance_loss: 0.0024, weighted_loss: 0.0020, label: 2, bag_size: 69002\n",
      "batch 239, loss: 0.0003, instance_loss: 0.0032, weighted_loss: 0.0012, label: 2, bag_size: 49458\n",
      "batch 259, loss: 0.0152, instance_loss: 0.0064, weighted_loss: 0.0126, label: 1, bag_size: 115202\n",
      "batch 279, loss: 0.0485, instance_loss: 0.0035, weighted_loss: 0.0350, label: 2, bag_size: 19057\n",
      "batch 299, loss: 0.0094, instance_loss: 0.0065, weighted_loss: 0.0085, label: 1, bag_size: 41704\n",
      "batch 319, loss: 0.5528, instance_loss: 0.0010, weighted_loss: 0.3873, label: 1, bag_size: 52176\n",
      "batch 339, loss: 0.0064, instance_loss: 0.0283, weighted_loss: 0.0130, label: 1, bag_size: 14553\n",
      "batch 359, loss: 0.0069, instance_loss: 0.0000, weighted_loss: 0.0048, label: 0, bag_size: 68879\n",
      "batch 379, loss: 1.1241, instance_loss: 0.0006, weighted_loss: 0.7871, label: 1, bag_size: 105100\n",
      "batch 399, loss: 0.0097, instance_loss: 0.0049, weighted_loss: 0.0083, label: 2, bag_size: 66801\n",
      "batch 419, loss: 0.1464, instance_loss: 0.0119, weighted_loss: 0.1060, label: 2, bag_size: 23841\n",
      "batch 439, loss: 0.0058, instance_loss: 0.0410, weighted_loss: 0.0164, label: 0, bag_size: 19587\n",
      "batch 459, loss: 1.0486, instance_loss: 0.0008, weighted_loss: 0.7343, label: 2, bag_size: 51554\n",
      "batch 479, loss: 0.0010, instance_loss: 0.0010, weighted_loss: 0.0010, label: 0, bag_size: 51603\n",
      "batch 499, loss: 0.0101, instance_loss: 0.0012, weighted_loss: 0.0074, label: 0, bag_size: 76572\n",
      "batch 519, loss: 0.0002, instance_loss: 0.0041, weighted_loss: 0.0014, label: 2, bag_size: 66345\n",
      "batch 539, loss: 0.0085, instance_loss: 0.0018, weighted_loss: 0.0065, label: 0, bag_size: 73149\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999315693430657: correct 4381/4384\n",
      "Epoch: 11, train_loss: 0.1430, train_clustering_loss:  0.0067, train_error: 0.0493\n",
      "class 0: acc 0.9248554913294798, correct 160/173\n",
      "class 1: acc 0.9528795811518325, correct 182/191\n",
      "class 2: acc 0.9728260869565217, correct 179/184\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3973, val_error: 0.0984, auc: 0.9680\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8846153846153846, correct 23/26\n",
      "class 1: acc 0.8636363636363636, correct 19/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 3 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0493, instance_loss: 0.0037, weighted_loss: 0.0356, label: 0, bag_size: 58349\n",
      "batch 39, loss: 0.0693, instance_loss: 0.0008, weighted_loss: 0.0488, label: 1, bag_size: 75744\n",
      "batch 59, loss: 0.0048, instance_loss: 0.0038, weighted_loss: 0.0045, label: 2, bag_size: 55626\n",
      "batch 79, loss: 0.0027, instance_loss: 0.0106, weighted_loss: 0.0050, label: 2, bag_size: 48881\n",
      "batch 99, loss: 0.0009, instance_loss: 0.0009, weighted_loss: 0.0009, label: 1, bag_size: 49463\n",
      "batch 119, loss: 0.0041, instance_loss: 0.0125, weighted_loss: 0.0066, label: 1, bag_size: 27124\n",
      "batch 139, loss: 0.1460, instance_loss: 0.0081, weighted_loss: 0.1046, label: 2, bag_size: 76843\n",
      "batch 159, loss: 0.0000, instance_loss: 0.0023, weighted_loss: 0.0007, label: 0, bag_size: 9018\n",
      "batch 179, loss: 0.0005, instance_loss: 0.0059, weighted_loss: 0.0022, label: 0, bag_size: 47840\n",
      "batch 199, loss: 0.0134, instance_loss: 0.0015, weighted_loss: 0.0098, label: 0, bag_size: 96694\n",
      "batch 219, loss: 0.0000, instance_loss: 0.0000, weighted_loss: 0.0000, label: 0, bag_size: 92502\n",
      "batch 239, loss: 0.3910, instance_loss: 0.0031, weighted_loss: 0.2746, label: 2, bag_size: 50246\n",
      "batch 259, loss: 0.0001, instance_loss: 0.0018, weighted_loss: 0.0006, label: 0, bag_size: 70278\n",
      "batch 279, loss: 0.7465, instance_loss: 0.0021, weighted_loss: 0.5232, label: 0, bag_size: 76412\n",
      "batch 299, loss: 0.0005, instance_loss: 0.0080, weighted_loss: 0.0027, label: 1, bag_size: 74927\n",
      "batch 319, loss: 0.4555, instance_loss: 0.0015, weighted_loss: 0.3193, label: 1, bag_size: 114116\n",
      "batch 339, loss: 0.0846, instance_loss: 0.0050, weighted_loss: 0.0607, label: 2, bag_size: 78955\n",
      "batch 359, loss: 0.0157, instance_loss: 0.0037, weighted_loss: 0.0121, label: 0, bag_size: 50823\n",
      "batch 379, loss: 0.0001, instance_loss: 0.0031, weighted_loss: 0.0010, label: 2, bag_size: 66345\n",
      "batch 399, loss: 0.0012, instance_loss: 0.0006, weighted_loss: 0.0010, label: 0, bag_size: 53568\n",
      "batch 419, loss: 0.4475, instance_loss: 0.0186, weighted_loss: 0.3189, label: 2, bag_size: 65317\n",
      "batch 439, loss: 0.0229, instance_loss: 0.0153, weighted_loss: 0.0206, label: 1, bag_size: 22123\n",
      "batch 459, loss: 0.0014, instance_loss: 0.0006, weighted_loss: 0.0011, label: 1, bag_size: 80573\n",
      "batch 479, loss: 0.0726, instance_loss: 0.0002, weighted_loss: 0.0509, label: 2, bag_size: 103823\n",
      "batch 499, loss: 0.0007, instance_loss: 0.0080, weighted_loss: 0.0029, label: 1, bag_size: 20382\n",
      "batch 519, loss: 0.0768, instance_loss: 0.0056, weighted_loss: 0.0554, label: 2, bag_size: 65317\n",
      "batch 539, loss: 0.0006, instance_loss: 0.0133, weighted_loss: 0.0044, label: 1, bag_size: 17958\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 12, train_loss: 0.1564, train_clustering_loss:  0.0077, train_error: 0.0511\n",
      "class 0: acc 0.9297297297297298, correct 172/185\n",
      "class 1: acc 0.9318181818181818, correct 164/176\n",
      "class 2: acc 0.983957219251337, correct 184/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3762, val_error: 0.0984, auc: 0.9663\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9230769230769231, correct 24/26\n",
      "class 1: acc 0.8181818181818182, correct 18/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 4 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.8138, instance_loss: 0.0002, weighted_loss: 0.5697, label: 1, bag_size: 109684\n",
      "batch 39, loss: 0.0000, instance_loss: 0.0026, weighted_loss: 0.0008, label: 0, bag_size: 28964\n",
      "batch 59, loss: 0.0376, instance_loss: 0.0020, weighted_loss: 0.0269, label: 2, bag_size: 54040\n",
      "batch 79, loss: 0.0110, instance_loss: 0.0004, weighted_loss: 0.0078, label: 0, bag_size: 59072\n",
      "batch 99, loss: 0.1384, instance_loss: 0.0000, weighted_loss: 0.0969, label: 1, bag_size: 74882\n",
      "batch 119, loss: 0.4393, instance_loss: 0.0072, weighted_loss: 0.3096, label: 0, bag_size: 72204\n",
      "batch 139, loss: 0.0002, instance_loss: 0.0010, weighted_loss: 0.0004, label: 1, bag_size: 57077\n",
      "batch 159, loss: 0.0232, instance_loss: 0.0015, weighted_loss: 0.0167, label: 1, bag_size: 113186\n",
      "batch 179, loss: 0.0008, instance_loss: 0.0037, weighted_loss: 0.0017, label: 2, bag_size: 47603\n",
      "batch 199, loss: 0.0076, instance_loss: 0.0002, weighted_loss: 0.0054, label: 1, bag_size: 77740\n",
      "batch 219, loss: 0.0001, instance_loss: 0.0110, weighted_loss: 0.0034, label: 0, bag_size: 9867\n",
      "batch 239, loss: 0.0023, instance_loss: 0.0019, weighted_loss: 0.0022, label: 2, bag_size: 79373\n",
      "batch 259, loss: 0.0110, instance_loss: 0.0055, weighted_loss: 0.0094, label: 1, bag_size: 119731\n",
      "batch 279, loss: 0.0597, instance_loss: 0.0181, weighted_loss: 0.0472, label: 0, bag_size: 12044\n",
      "batch 299, loss: 0.0023, instance_loss: 0.0046, weighted_loss: 0.0030, label: 2, bag_size: 84436\n",
      "batch 319, loss: 0.0110, instance_loss: 0.0061, weighted_loss: 0.0095, label: 1, bag_size: 10520\n",
      "batch 339, loss: 0.3053, instance_loss: 0.0105, weighted_loss: 0.2168, label: 2, bag_size: 15486\n",
      "batch 359, loss: 0.0183, instance_loss: 0.0123, weighted_loss: 0.0165, label: 1, bag_size: 7667\n",
      "batch 379, loss: 0.0308, instance_loss: 0.0084, weighted_loss: 0.0241, label: 2, bag_size: 34408\n",
      "batch 399, loss: 0.0030, instance_loss: 0.0019, weighted_loss: 0.0027, label: 2, bag_size: 47603\n",
      "batch 419, loss: 0.3561, instance_loss: 0.0054, weighted_loss: 0.2509, label: 2, bag_size: 76843\n",
      "batch 439, loss: 0.0324, instance_loss: 0.0044, weighted_loss: 0.0240, label: 2, bag_size: 65340\n",
      "batch 459, loss: 1.2586, instance_loss: 0.0102, weighted_loss: 0.8841, label: 0, bag_size: 91611\n",
      "batch 479, loss: 0.0354, instance_loss: 0.0026, weighted_loss: 0.0256, label: 2, bag_size: 64675\n",
      "batch 499, loss: 0.0641, instance_loss: 0.0034, weighted_loss: 0.0459, label: 2, bag_size: 51316\n",
      "batch 519, loss: 0.3272, instance_loss: 0.0078, weighted_loss: 0.2314, label: 2, bag_size: 76843\n",
      "batch 539, loss: 0.0117, instance_loss: 0.0020, weighted_loss: 0.0088, label: 2, bag_size: 54040\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 13, train_loss: 0.1589, train_clustering_loss:  0.0090, train_error: 0.0639\n",
      "class 0: acc 0.9212962962962963, correct 199/216\n",
      "class 1: acc 0.9036144578313253, correct 150/166\n",
      "class 2: acc 0.9879518072289156, correct 164/166\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4838, val_error: 0.1148, auc: 0.9673\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9615384615384616, correct 25/26\n",
      "class 1: acc 0.7272727272727273, correct 16/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 5 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0247, instance_loss: 0.0087, weighted_loss: 0.0199, label: 0, bag_size: 30263\n",
      "batch 39, loss: 0.0153, instance_loss: 0.0064, weighted_loss: 0.0126, label: 2, bag_size: 72686\n",
      "batch 59, loss: 0.0008, instance_loss: 0.0008, weighted_loss: 0.0008, label: 1, bag_size: 79045\n",
      "batch 79, loss: 0.0143, instance_loss: 0.0053, weighted_loss: 0.0116, label: 2, bag_size: 65317\n",
      "batch 99, loss: 1.5889, instance_loss: 0.0021, weighted_loss: 1.1129, label: 0, bag_size: 51028\n",
      "batch 119, loss: 0.0005, instance_loss: 0.0000, weighted_loss: 0.0004, label: 0, bag_size: 69160\n",
      "batch 139, loss: 0.1453, instance_loss: 0.0093, weighted_loss: 0.1045, label: 2, bag_size: 76843\n",
      "batch 159, loss: 0.9552, instance_loss: 0.0114, weighted_loss: 0.6721, label: 1, bag_size: 86619\n",
      "batch 179, loss: 0.0477, instance_loss: 0.0021, weighted_loss: 0.0340, label: 2, bag_size: 51316\n",
      "batch 199, loss: 0.0009, instance_loss: 0.0105, weighted_loss: 0.0038, label: 0, bag_size: 4531\n",
      "batch 219, loss: 0.0139, instance_loss: 0.0021, weighted_loss: 0.0104, label: 0, bag_size: 67453\n",
      "batch 239, loss: 0.0038, instance_loss: 0.0037, weighted_loss: 0.0038, label: 2, bag_size: 69002\n",
      "batch 259, loss: 0.0004, instance_loss: 0.0081, weighted_loss: 0.0027, label: 2, bag_size: 49458\n",
      "batch 279, loss: 0.0318, instance_loss: 0.0025, weighted_loss: 0.0230, label: 1, bag_size: 78923\n",
      "batch 299, loss: 0.0051, instance_loss: 0.0002, weighted_loss: 0.0036, label: 0, bag_size: 75996\n",
      "batch 319, loss: 0.0030, instance_loss: 0.0016, weighted_loss: 0.0026, label: 0, bag_size: 60909\n",
      "batch 339, loss: 0.0000, instance_loss: 0.0023, weighted_loss: 0.0007, label: 0, bag_size: 45490\n",
      "batch 359, loss: 0.0370, instance_loss: 0.0041, weighted_loss: 0.0271, label: 0, bag_size: 88728\n",
      "batch 379, loss: 0.0022, instance_loss: 0.0215, weighted_loss: 0.0080, label: 1, bag_size: 53211\n",
      "batch 399, loss: 0.2615, instance_loss: 0.0010, weighted_loss: 0.1834, label: 1, bag_size: 45064\n",
      "batch 419, loss: 2.6832, instance_loss: 0.0264, weighted_loss: 1.8861, label: 0, bag_size: 59783\n",
      "batch 439, loss: 0.0073, instance_loss: 0.0005, weighted_loss: 0.0053, label: 0, bag_size: 70900\n",
      "batch 459, loss: 0.0018, instance_loss: 0.0096, weighted_loss: 0.0041, label: 1, bag_size: 46798\n",
      "batch 479, loss: 0.0852, instance_loss: 0.0023, weighted_loss: 0.0603, label: 1, bag_size: 62739\n",
      "batch 499, loss: 0.0112, instance_loss: 0.0053, weighted_loss: 0.0094, label: 2, bag_size: 39508\n",
      "batch 519, loss: 0.0445, instance_loss: 0.0048, weighted_loss: 0.0326, label: 1, bag_size: 12138\n",
      "batch 539, loss: 0.0105, instance_loss: 0.0016, weighted_loss: 0.0078, label: 2, bag_size: 51017\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 14, train_loss: 0.1645, train_clustering_loss:  0.0060, train_error: 0.0474\n",
      "class 0: acc 0.9064327485380117, correct 155/171\n",
      "class 1: acc 0.9447513812154696, correct 171/181\n",
      "class 2: acc 1.0, correct 196/196\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4249, val_error: 0.1639, auc: 0.9582\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8076923076923077, correct 21/26\n",
      "class 1: acc 0.7727272727272727, correct 17/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 6 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.7731, instance_loss: 0.0004, weighted_loss: 0.5413, label: 1, bag_size: 45064\n",
      "batch 39, loss: 0.0049, instance_loss: 0.0010, weighted_loss: 0.0037, label: 0, bag_size: 46695\n",
      "batch 59, loss: 0.1284, instance_loss: 0.0313, weighted_loss: 0.0993, label: 2, bag_size: 34408\n",
      "batch 79, loss: 1.4668, instance_loss: 0.0229, weighted_loss: 1.0336, label: 1, bag_size: 51926\n",
      "batch 99, loss: 0.0188, instance_loss: 0.0036, weighted_loss: 0.0142, label: 2, bag_size: 30615\n",
      "batch 119, loss: 0.0600, instance_loss: 0.0057, weighted_loss: 0.0437, label: 0, bag_size: 47840\n",
      "batch 139, loss: 0.0243, instance_loss: 0.0013, weighted_loss: 0.0174, label: 1, bag_size: 91540\n",
      "batch 159, loss: 0.0095, instance_loss: 0.0023, weighted_loss: 0.0073, label: 0, bag_size: 93592\n",
      "batch 179, loss: 0.0141, instance_loss: 0.0064, weighted_loss: 0.0118, label: 2, bag_size: 11415\n",
      "batch 199, loss: 0.0177, instance_loss: 0.0081, weighted_loss: 0.0148, label: 1, bag_size: 60316\n",
      "batch 219, loss: 0.0031, instance_loss: 0.0016, weighted_loss: 0.0027, label: 2, bag_size: 84436\n",
      "batch 239, loss: 0.0027, instance_loss: 0.0069, weighted_loss: 0.0039, label: 2, bag_size: 40315\n",
      "batch 259, loss: 0.0341, instance_loss: 0.0022, weighted_loss: 0.0245, label: 0, bag_size: 38922\n",
      "batch 279, loss: 0.0003, instance_loss: 0.0020, weighted_loss: 0.0009, label: 0, bag_size: 84361\n",
      "batch 299, loss: 0.1257, instance_loss: 0.0050, weighted_loss: 0.0895, label: 1, bag_size: 23736\n",
      "batch 319, loss: 0.0072, instance_loss: 0.0029, weighted_loss: 0.0059, label: 2, bag_size: 69002\n",
      "batch 339, loss: 0.0008, instance_loss: 0.0022, weighted_loss: 0.0012, label: 0, bag_size: 93189\n",
      "batch 359, loss: 0.1571, instance_loss: 0.0041, weighted_loss: 0.1112, label: 2, bag_size: 28252\n",
      "batch 379, loss: 0.1154, instance_loss: 0.0019, weighted_loss: 0.0813, label: 0, bag_size: 67271\n",
      "batch 399, loss: 0.0004, instance_loss: 0.0086, weighted_loss: 0.0028, label: 2, bag_size: 36978\n",
      "batch 419, loss: 0.2141, instance_loss: 0.0083, weighted_loss: 0.1524, label: 1, bag_size: 53467\n",
      "batch 439, loss: 0.0132, instance_loss: 0.0049, weighted_loss: 0.0107, label: 1, bag_size: 172802\n",
      "batch 459, loss: 0.1329, instance_loss: 0.0011, weighted_loss: 0.0934, label: 1, bag_size: 43142\n",
      "batch 479, loss: 0.0229, instance_loss: 0.0015, weighted_loss: 0.0165, label: 1, bag_size: 24044\n",
      "batch 499, loss: 0.0102, instance_loss: 0.0083, weighted_loss: 0.0096, label: 0, bag_size: 11566\n",
      "batch 519, loss: 0.0216, instance_loss: 0.0035, weighted_loss: 0.0162, label: 1, bag_size: 51221\n",
      "batch 539, loss: 0.0000, instance_loss: 0.0041, weighted_loss: 0.0012, label: 0, bag_size: 14487\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 15, train_loss: 0.1439, train_clustering_loss:  0.0059, train_error: 0.0456\n",
      "class 0: acc 0.9221556886227545, correct 154/167\n",
      "class 1: acc 0.9504950495049505, correct 192/202\n",
      "class 2: acc 0.9888268156424581, correct 177/179\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4346, val_error: 0.1803, auc: 0.9689\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.7307692307692307, correct 19/26\n",
      "class 1: acc 0.9090909090909091, correct 20/22\n",
      "class 2: acc 0.8461538461538461, correct 11/13\n",
      "EarlyStopping counter: 7 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0794, instance_loss: 0.0018, weighted_loss: 0.0561, label: 1, bag_size: 65195\n",
      "batch 39, loss: 0.0244, instance_loss: 0.0025, weighted_loss: 0.0178, label: 1, bag_size: 81168\n",
      "batch 59, loss: 0.0078, instance_loss: 0.0004, weighted_loss: 0.0056, label: 1, bag_size: 105523\n",
      "batch 79, loss: 2.3618, instance_loss: 0.0008, weighted_loss: 1.6535, label: 1, bag_size: 31015\n",
      "batch 99, loss: 0.0002, instance_loss: 0.0048, weighted_loss: 0.0016, label: 1, bag_size: 11509\n",
      "batch 119, loss: 0.0110, instance_loss: 0.0012, weighted_loss: 0.0080, label: 0, bag_size: 75996\n",
      "batch 139, loss: 0.0070, instance_loss: 0.0079, weighted_loss: 0.0073, label: 2, bag_size: 34408\n",
      "batch 159, loss: 0.2338, instance_loss: 0.0023, weighted_loss: 0.1643, label: 1, bag_size: 56911\n",
      "batch 179, loss: 1.8990, instance_loss: 0.0044, weighted_loss: 1.3306, label: 1, bag_size: 42983\n",
      "batch 199, loss: 0.1408, instance_loss: 0.0006, weighted_loss: 0.0987, label: 1, bag_size: 74882\n",
      "batch 219, loss: 0.0075, instance_loss: 0.0092, weighted_loss: 0.0080, label: 2, bag_size: 11119\n",
      "batch 239, loss: 0.0002, instance_loss: 0.0041, weighted_loss: 0.0014, label: 2, bag_size: 84436\n",
      "batch 259, loss: 0.0002, instance_loss: 0.0041, weighted_loss: 0.0014, label: 0, bag_size: 11462\n",
      "batch 279, loss: 0.2697, instance_loss: 0.0007, weighted_loss: 0.1890, label: 1, bag_size: 32084\n",
      "batch 299, loss: 0.0086, instance_loss: 0.0053, weighted_loss: 0.0076, label: 2, bag_size: 63921\n",
      "batch 319, loss: 0.0005, instance_loss: 0.0027, weighted_loss: 0.0012, label: 2, bag_size: 49458\n",
      "batch 339, loss: 0.0016, instance_loss: 0.0003, weighted_loss: 0.0012, label: 0, bag_size: 75475\n",
      "batch 359, loss: 0.0128, instance_loss: 0.0016, weighted_loss: 0.0094, label: 1, bag_size: 45070\n",
      "batch 379, loss: 0.0329, instance_loss: 0.0190, weighted_loss: 0.0288, label: 2, bag_size: 50246\n",
      "batch 399, loss: 0.0150, instance_loss: 0.0137, weighted_loss: 0.0146, label: 1, bag_size: 15902\n",
      "batch 419, loss: 0.0313, instance_loss: 0.0011, weighted_loss: 0.0222, label: 0, bag_size: 90654\n",
      "batch 439, loss: 0.9347, instance_loss: 0.0125, weighted_loss: 0.6580, label: 0, bag_size: 41723\n",
      "batch 459, loss: 0.0073, instance_loss: 0.0011, weighted_loss: 0.0054, label: 0, bag_size: 71326\n",
      "batch 479, loss: 0.0045, instance_loss: 0.0014, weighted_loss: 0.0036, label: 1, bag_size: 84060\n",
      "batch 499, loss: 0.0001, instance_loss: 0.0018, weighted_loss: 0.0006, label: 0, bag_size: 9374\n",
      "batch 519, loss: 0.0367, instance_loss: 0.0043, weighted_loss: 0.0270, label: 2, bag_size: 30615\n",
      "batch 539, loss: 0.0003, instance_loss: 0.0047, weighted_loss: 0.0017, label: 1, bag_size: 17958\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 16, train_loss: 0.1538, train_clustering_loss:  0.0066, train_error: 0.0620\n",
      "class 0: acc 0.9101123595505618, correct 162/178\n",
      "class 1: acc 0.9281767955801105, correct 168/181\n",
      "class 2: acc 0.9735449735449735, correct 184/189\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4232, val_error: 0.1311, auc: 0.9652\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8076923076923077, correct 21/26\n",
      "class 1: acc 0.8636363636363636, correct 19/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 8 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0015, instance_loss: 0.0086, weighted_loss: 0.0036, label: 2, bag_size: 46661\n",
      "batch 39, loss: 0.0010, instance_loss: 0.0012, weighted_loss: 0.0011, label: 2, bag_size: 48881\n",
      "batch 59, loss: 1.5951, instance_loss: 0.0047, weighted_loss: 1.1180, label: 0, bag_size: 76865\n",
      "batch 79, loss: 0.0019, instance_loss: 0.0025, weighted_loss: 0.0021, label: 2, bag_size: 51251\n",
      "batch 99, loss: 0.0029, instance_loss: 0.0100, weighted_loss: 0.0051, label: 0, bag_size: 12044\n",
      "batch 119, loss: 0.0071, instance_loss: 0.0036, weighted_loss: 0.0060, label: 1, bag_size: 8990\n",
      "batch 139, loss: 0.0004, instance_loss: 0.0017, weighted_loss: 0.0008, label: 0, bag_size: 70313\n",
      "batch 159, loss: 0.0822, instance_loss: 0.0003, weighted_loss: 0.0576, label: 1, bag_size: 75744\n",
      "batch 179, loss: 0.0056, instance_loss: 0.0036, weighted_loss: 0.0050, label: 2, bag_size: 38471\n",
      "batch 199, loss: 0.0073, instance_loss: 0.0017, weighted_loss: 0.0056, label: 1, bag_size: 32084\n",
      "batch 219, loss: 0.0002, instance_loss: 0.0019, weighted_loss: 0.0007, label: 0, bag_size: 54512\n",
      "batch 239, loss: 0.0003, instance_loss: 0.0002, weighted_loss: 0.0003, label: 1, bag_size: 64851\n",
      "batch 259, loss: 0.0007, instance_loss: 0.0009, weighted_loss: 0.0007, label: 2, bag_size: 95428\n",
      "batch 279, loss: 0.0909, instance_loss: 0.0019, weighted_loss: 0.0642, label: 1, bag_size: 65195\n",
      "batch 299, loss: 0.0025, instance_loss: 0.0014, weighted_loss: 0.0022, label: 2, bag_size: 72966\n",
      "batch 319, loss: 0.6790, instance_loss: 0.0008, weighted_loss: 0.4755, label: 1, bag_size: 96719\n",
      "batch 339, loss: 2.3524, instance_loss: 0.0087, weighted_loss: 1.6493, label: 1, bag_size: 30632\n",
      "batch 359, loss: 0.0098, instance_loss: 0.0029, weighted_loss: 0.0077, label: 2, bag_size: 48881\n",
      "batch 379, loss: 0.1014, instance_loss: 0.0017, weighted_loss: 0.0715, label: 0, bag_size: 43067\n",
      "batch 399, loss: 0.0302, instance_loss: 0.0008, weighted_loss: 0.0214, label: 1, bag_size: 114116\n",
      "batch 419, loss: 0.0163, instance_loss: 0.0031, weighted_loss: 0.0124, label: 2, bag_size: 65317\n",
      "batch 439, loss: 0.0036, instance_loss: 0.0028, weighted_loss: 0.0034, label: 0, bag_size: 43598\n",
      "batch 459, loss: 0.0026, instance_loss: 0.0058, weighted_loss: 0.0036, label: 2, bag_size: 30615\n",
      "batch 479, loss: 0.6689, instance_loss: 0.0033, weighted_loss: 0.4692, label: 0, bag_size: 66882\n",
      "batch 499, loss: 0.0104, instance_loss: 0.0030, weighted_loss: 0.0082, label: 1, bag_size: 96881\n",
      "batch 519, loss: 0.1378, instance_loss: 0.0072, weighted_loss: 0.0986, label: 1, bag_size: 18108\n",
      "batch 539, loss: 0.0104, instance_loss: 0.0031, weighted_loss: 0.0082, label: 1, bag_size: 33537\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 17, train_loss: 0.1028, train_clustering_loss:  0.0056, train_error: 0.0347\n",
      "class 0: acc 0.9444444444444444, correct 170/180\n",
      "class 1: acc 0.9576719576719577, correct 181/189\n",
      "class 2: acc 0.994413407821229, correct 178/179\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4141, val_error: 0.0984, auc: 0.9635\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8461538461538461, correct 22/26\n",
      "class 1: acc 0.9090909090909091, correct 20/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 9 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0523, instance_loss: 0.0068, weighted_loss: 0.0387, label: 2, bag_size: 50246\n",
      "batch 39, loss: 0.9532, instance_loss: 0.0083, weighted_loss: 0.6697, label: 2, bag_size: 23841\n",
      "batch 59, loss: 0.2680, instance_loss: 0.0085, weighted_loss: 0.1901, label: 0, bag_size: 28029\n",
      "batch 79, loss: 0.0008, instance_loss: 0.0062, weighted_loss: 0.0024, label: 2, bag_size: 95428\n",
      "batch 99, loss: 0.0466, instance_loss: 0.0006, weighted_loss: 0.0328, label: 1, bag_size: 16753\n",
      "batch 119, loss: 0.0015, instance_loss: 0.0028, weighted_loss: 0.0019, label: 0, bag_size: 23506\n",
      "batch 139, loss: 0.1502, instance_loss: 0.0042, weighted_loss: 0.1064, label: 1, bag_size: 18108\n",
      "batch 159, loss: 0.0012, instance_loss: 0.0029, weighted_loss: 0.0017, label: 2, bag_size: 66023\n",
      "batch 179, loss: 0.0023, instance_loss: 0.0011, weighted_loss: 0.0019, label: 0, bag_size: 12884\n",
      "batch 199, loss: 1.0899, instance_loss: 0.0041, weighted_loss: 0.7641, label: 1, bag_size: 79573\n",
      "batch 219, loss: 0.6180, instance_loss: 0.0056, weighted_loss: 0.4343, label: 1, bag_size: 23736\n",
      "batch 239, loss: 0.0003, instance_loss: 0.0025, weighted_loss: 0.0010, label: 1, bag_size: 119731\n",
      "batch 259, loss: 0.5226, instance_loss: 0.0036, weighted_loss: 0.3669, label: 2, bag_size: 64675\n",
      "batch 279, loss: 0.1729, instance_loss: 0.0028, weighted_loss: 0.1218, label: 0, bag_size: 60828\n",
      "batch 299, loss: 0.0003, instance_loss: 0.0107, weighted_loss: 0.0034, label: 2, bag_size: 68187\n",
      "batch 319, loss: 0.7889, instance_loss: 0.0055, weighted_loss: 0.5539, label: 0, bag_size: 66469\n",
      "batch 339, loss: 0.0787, instance_loss: 0.0081, weighted_loss: 0.0575, label: 1, bag_size: 21267\n",
      "batch 359, loss: 0.1950, instance_loss: 0.0032, weighted_loss: 0.1375, label: 1, bag_size: 74256\n",
      "batch 379, loss: 0.1556, instance_loss: 0.0060, weighted_loss: 0.1108, label: 2, bag_size: 50246\n",
      "batch 399, loss: 0.0067, instance_loss: 0.0128, weighted_loss: 0.0085, label: 2, bag_size: 63921\n",
      "batch 419, loss: 0.0564, instance_loss: 0.0142, weighted_loss: 0.0438, label: 2, bag_size: 19057\n",
      "batch 439, loss: 0.0027, instance_loss: 0.0028, weighted_loss: 0.0027, label: 0, bag_size: 58267\n",
      "batch 459, loss: 0.0035, instance_loss: 0.0013, weighted_loss: 0.0028, label: 0, bag_size: 76572\n",
      "batch 479, loss: 0.1156, instance_loss: 0.0189, weighted_loss: 0.0866, label: 1, bag_size: 41704\n",
      "batch 499, loss: 0.5394, instance_loss: 0.0116, weighted_loss: 0.3810, label: 2, bag_size: 12043\n",
      "batch 519, loss: 0.0002, instance_loss: 0.0014, weighted_loss: 0.0005, label: 1, bag_size: 67008\n",
      "batch 539, loss: 0.1215, instance_loss: 0.0007, weighted_loss: 0.0853, label: 2, bag_size: 51554\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 18, train_loss: 0.1565, train_clustering_loss:  0.0063, train_error: 0.0602\n",
      "class 0: acc 0.912568306010929, correct 167/183\n",
      "class 1: acc 0.9289617486338798, correct 170/183\n",
      "class 2: acc 0.978021978021978, correct 178/182\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3465, val_error: 0.1148, auc: 0.9665\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8461538461538461, correct 22/26\n",
      "class 1: acc 0.8636363636363636, correct 19/22\n",
      "class 2: acc 1.0, correct 13/13\n",
      "EarlyStopping counter: 10 out of 10\n",
      "Early stopping\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9615384615384617\n",
      "0.9638694638694638\n",
      "0.9935897435897435\n",
      "Val error: 0.0820, ROC AUC: 0.9730\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9897560975609756\n",
      "0.9838709677419355\n",
      "0.9978902953586498\n",
      "Test error: 0.0440, ROC AUC: 0.9905\n",
      "class 0: acc 0.94, correct 47/50\n",
      "class 1: acc 0.9655172413793104, correct 28/29\n",
      "class 2: acc 1.0, correct 12/12\n",
      "   Unnamed: 0       case_id  \\\n",
      "0           0  TCGA-4A-A93X   \n",
      "1           1  TCGA-B3-4104   \n",
      "2           2  TCGA-BP-4963   \n",
      "3           3  TCGA-BP-5170   \n",
      "4           4  TCGA-BP-5175   \n",
      "\n",
      "                                            slide_id oncotree_code site   age  \\\n",
      "0  TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A  58.0   \n",
      "1  TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3  75.0   \n",
      "2  TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP  63.0   \n",
      "3  TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP  55.0   \n",
      "4  TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP  60.0   \n",
      "\n",
      "   survival_months  is_female  censorship race label  \n",
      "0            12.81        0.0         1.0    W     1  \n",
      "1            34.46        0.0         1.0    W     1  \n",
      "2            60.25        0.0         1.0    W     0  \n",
      "3            79.24        0.0         1.0    W     0  \n",
      "4            30.62        0.0         1.0    W     0  \n",
      "Traing Data Size ({1.00}): 548 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 61 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 91 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "\n",
      "Training Fold 2!\n",
      "\n",
      "Init train/val/test splits... \n",
      "Done!\n",
      "Training on 548 samples\n",
      "Validating on 61 samples\n",
      "Testing on 91 samples\n",
      "\n",
      "Init loss function... Done!\n",
      "\n",
      "Init Model... Setting tau to 1.0\n",
      "Done!\n",
      "CLAM_SB(\n",
      "  (attention_net): Sequential(\n",
      "    (0): Linear(in_features=384, out_features=384, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Dropout(p=0.25, inplace=False)\n",
      "    (3): Attn_Net_Gated(\n",
      "      (attention_a): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Tanh()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_b): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Sigmoid()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_c): Linear(in_features=256, out_features=1, bias=True)\n",
      "    )\n",
      "  )\n",
      "  (feature_linear1): Linear(in_features=768, out_features=384, bias=True)\n",
      "  (classifiers): Linear(in_features=384, out_features=3, bias=True)\n",
      "  (instance_classifiers): ModuleList(\n",
      "    (0): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (1): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (2): Linear(in_features=384, out_features=2, bias=True)\n",
      "  )\n",
      "  (instance_loss_fn): SmoothTop1SVM()\n",
      ")\n",
      "Total number of parameters: 643978\n",
      "Total number of trainable parameters: 643978\n",
      "\n",
      "Init optimizer ... Done!\n",
      "\n",
      "Init Loaders... Done!\n",
      "\n",
      "Setup EarlyStopping... Done!\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.8771, instance_loss: 1.3225, weighted_loss: 1.0107, label: 2, bag_size: 68187\n",
      "batch 39, loss: 0.8353, instance_loss: 0.9533, weighted_loss: 0.8707, label: 0, bag_size: 58969\n",
      "batch 59, loss: 0.2868, instance_loss: 0.5707, weighted_loss: 0.3719, label: 0, bag_size: 18166\n",
      "batch 79, loss: 1.7353, instance_loss: 1.1215, weighted_loss: 1.5512, label: 1, bag_size: 72255\n",
      "batch 99, loss: 0.2436, instance_loss: 0.9352, weighted_loss: 0.4511, label: 2, bag_size: 30615\n",
      "batch 119, loss: 0.3147, instance_loss: 0.1853, weighted_loss: 0.2759, label: 0, bag_size: 86416\n",
      "batch 139, loss: 0.7552, instance_loss: 0.5653, weighted_loss: 0.6982, label: 0, bag_size: 34548\n",
      "batch 159, loss: 1.2119, instance_loss: 0.5145, weighted_loss: 1.0027, label: 0, bag_size: 66469\n",
      "batch 179, loss: 0.3625, instance_loss: 0.2200, weighted_loss: 0.3197, label: 2, bag_size: 66345\n",
      "batch 199, loss: 0.3801, instance_loss: 0.1801, weighted_loss: 0.3201, label: 0, bag_size: 65592\n",
      "batch 219, loss: 0.1599, instance_loss: 0.0723, weighted_loss: 0.1336, label: 0, bag_size: 65407\n",
      "batch 239, loss: 0.7002, instance_loss: 0.3807, weighted_loss: 0.6044, label: 2, bag_size: 51017\n",
      "batch 259, loss: 0.1306, instance_loss: 0.2130, weighted_loss: 0.1553, label: 1, bag_size: 20374\n",
      "batch 279, loss: 0.0763, instance_loss: 0.0798, weighted_loss: 0.0773, label: 2, bag_size: 50045\n",
      "batch 299, loss: 0.0873, instance_loss: 0.0485, weighted_loss: 0.0757, label: 2, bag_size: 54265\n",
      "batch 319, loss: 0.2163, instance_loss: 0.1349, weighted_loss: 0.1919, label: 1, bag_size: 28349\n",
      "batch 339, loss: 0.1172, instance_loss: 0.0169, weighted_loss: 0.0871, label: 1, bag_size: 4771\n",
      "batch 359, loss: 2.3187, instance_loss: 0.1484, weighted_loss: 1.6676, label: 1, bag_size: 74256\n",
      "batch 379, loss: 0.2771, instance_loss: 0.0131, weighted_loss: 0.1979, label: 0, bag_size: 62258\n",
      "batch 399, loss: 0.1248, instance_loss: 0.0497, weighted_loss: 0.1023, label: 1, bag_size: 15902\n",
      "batch 419, loss: 1.0522, instance_loss: 0.2205, weighted_loss: 0.8027, label: 1, bag_size: 49597\n",
      "batch 439, loss: 0.0388, instance_loss: 0.1012, weighted_loss: 0.0575, label: 2, bag_size: 47603\n",
      "batch 459, loss: 0.0395, instance_loss: 0.0198, weighted_loss: 0.0336, label: 0, bag_size: 79446\n",
      "batch 479, loss: 0.9111, instance_loss: 0.0701, weighted_loss: 0.6588, label: 1, bag_size: 78398\n",
      "batch 499, loss: 0.0272, instance_loss: 0.0122, weighted_loss: 0.0227, label: 2, bag_size: 68187\n",
      "batch 519, loss: 0.9266, instance_loss: 0.0155, weighted_loss: 0.6533, label: 2, bag_size: 11415\n",
      "batch 539, loss: 0.7220, instance_loss: 0.0085, weighted_loss: 0.5080, label: 2, bag_size: 65340\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9370437956204379: correct 4108/4384\n",
      "class 1 clustering acc 0.8859489051094891: correct 3884/4384\n",
      "Epoch: 0, train_loss: 0.6515, train_clustering_loss:  0.3583, train_error: 0.2518\n",
      "class 0: acc 0.7575757575757576, correct 150/198\n",
      "class 1: acc 0.6709677419354839, correct 104/155\n",
      "class 2: acc 0.8, correct 156/195\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5570, val_error: 0.2295, auc: 0.9396\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.47368421052631576, correct 9/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "Validation loss decreased (inf --> 0.939574).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.3966, instance_loss: 0.2884, weighted_loss: 1.0641, label: 2, bag_size: 13963\n",
      "batch 39, loss: 0.0702, instance_loss: 0.0187, weighted_loss: 0.0548, label: 2, bag_size: 79690\n",
      "batch 59, loss: 0.5096, instance_loss: 0.0041, weighted_loss: 0.3580, label: 1, bag_size: 114116\n",
      "batch 79, loss: 0.0529, instance_loss: 0.0124, weighted_loss: 0.0408, label: 1, bag_size: 42941\n",
      "batch 99, loss: 1.5831, instance_loss: 0.0055, weighted_loss: 1.1098, label: 2, bag_size: 50246\n",
      "batch 119, loss: 1.2714, instance_loss: 0.0043, weighted_loss: 0.8912, label: 2, bag_size: 16282\n",
      "batch 139, loss: 0.0787, instance_loss: 0.0087, weighted_loss: 0.0577, label: 2, bag_size: 72015\n",
      "batch 159, loss: 0.0783, instance_loss: 0.0042, weighted_loss: 0.0561, label: 2, bag_size: 82484\n",
      "batch 179, loss: 0.0186, instance_loss: 0.0316, weighted_loss: 0.0225, label: 0, bag_size: 47840\n",
      "batch 199, loss: 0.6727, instance_loss: 0.0033, weighted_loss: 0.4719, label: 1, bag_size: 36784\n",
      "batch 219, loss: 0.0487, instance_loss: 0.0011, weighted_loss: 0.0344, label: 0, bag_size: 90654\n",
      "batch 239, loss: 0.3420, instance_loss: 0.0033, weighted_loss: 0.2404, label: 2, bag_size: 87135\n",
      "batch 259, loss: 0.1865, instance_loss: 0.0049, weighted_loss: 0.1320, label: 0, bag_size: 94524\n",
      "batch 279, loss: 0.6470, instance_loss: 0.0056, weighted_loss: 0.4546, label: 0, bag_size: 27818\n",
      "batch 299, loss: 0.0006, instance_loss: 0.0628, weighted_loss: 0.0193, label: 0, bag_size: 9867\n",
      "batch 319, loss: 0.0786, instance_loss: 0.0088, weighted_loss: 0.0577, label: 0, bag_size: 44730\n",
      "batch 339, loss: 0.0098, instance_loss: 0.0148, weighted_loss: 0.0113, label: 1, bag_size: 44760\n",
      "batch 359, loss: 0.0096, instance_loss: 0.0083, weighted_loss: 0.0092, label: 0, bag_size: 16262\n",
      "batch 379, loss: 0.0401, instance_loss: 0.0064, weighted_loss: 0.0300, label: 1, bag_size: 19703\n",
      "batch 399, loss: 0.0063, instance_loss: 0.0717, weighted_loss: 0.0260, label: 1, bag_size: 20374\n",
      "batch 419, loss: 0.2045, instance_loss: 0.0015, weighted_loss: 0.1436, label: 2, bag_size: 47138\n",
      "batch 439, loss: 1.4618, instance_loss: 0.0121, weighted_loss: 1.0269, label: 1, bag_size: 72255\n",
      "batch 459, loss: 3.8506, instance_loss: 0.0033, weighted_loss: 2.6964, label: 0, bag_size: 53401\n",
      "batch 479, loss: 3.2179, instance_loss: 0.0485, weighted_loss: 2.2671, label: 1, bag_size: 30632\n",
      "batch 499, loss: 1.1803, instance_loss: 0.0060, weighted_loss: 0.8280, label: 0, bag_size: 74863\n",
      "batch 519, loss: 0.1035, instance_loss: 0.0022, weighted_loss: 0.0731, label: 2, bag_size: 103823\n",
      "batch 539, loss: 1.3947, instance_loss: 0.0371, weighted_loss: 0.9874, label: 2, bag_size: 52584\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "class 1 clustering acc 0.999315693430657: correct 4381/4384\n",
      "Epoch: 1, train_loss: 0.4227, train_clustering_loss:  0.0205, train_error: 0.1734\n",
      "class 0: acc 0.8324607329842932, correct 159/191\n",
      "class 1: acc 0.8210526315789474, correct 156/190\n",
      "class 2: acc 0.8263473053892215, correct 138/167\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4912, val_error: 0.2131, auc: 0.9640\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.5263157894736842, correct 10/19\n",
      "class 2: acc 1.0, correct 5/5\n",
      "Validation loss decreased (0.939574 --> 0.964002).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1128, instance_loss: 0.0108, weighted_loss: 0.0822, label: 2, bag_size: 58111\n",
      "batch 39, loss: 0.0280, instance_loss: 0.0033, weighted_loss: 0.0206, label: 2, bag_size: 103823\n",
      "batch 59, loss: 0.0534, instance_loss: 0.0204, weighted_loss: 0.0435, label: 1, bag_size: 45070\n",
      "batch 79, loss: 0.0726, instance_loss: 0.0369, weighted_loss: 0.0619, label: 2, bag_size: 79690\n",
      "batch 99, loss: 0.0094, instance_loss: 0.0023, weighted_loss: 0.0073, label: 2, bag_size: 68187\n",
      "batch 119, loss: 0.0574, instance_loss: 0.0013, weighted_loss: 0.0406, label: 2, bag_size: 34408\n",
      "batch 139, loss: 0.0391, instance_loss: 0.0018, weighted_loss: 0.0279, label: 1, bag_size: 81168\n",
      "batch 159, loss: 0.1122, instance_loss: 0.0023, weighted_loss: 0.0792, label: 1, bag_size: 56317\n",
      "batch 179, loss: 0.0979, instance_loss: 0.0023, weighted_loss: 0.0692, label: 2, bag_size: 65340\n",
      "batch 199, loss: 0.0668, instance_loss: 0.0060, weighted_loss: 0.0486, label: 2, bag_size: 54040\n",
      "batch 219, loss: 0.9582, instance_loss: 0.0010, weighted_loss: 0.6710, label: 1, bag_size: 74256\n",
      "batch 239, loss: 0.5590, instance_loss: 0.0019, weighted_loss: 0.3918, label: 2, bag_size: 64675\n",
      "batch 259, loss: 0.0307, instance_loss: 0.0033, weighted_loss: 0.0225, label: 2, bag_size: 39508\n",
      "batch 279, loss: 0.0367, instance_loss: 0.0212, weighted_loss: 0.0321, label: 2, bag_size: 38471\n",
      "batch 299, loss: 0.0454, instance_loss: 0.0005, weighted_loss: 0.0319, label: 2, bag_size: 50978\n",
      "batch 319, loss: 0.3504, instance_loss: 0.0085, weighted_loss: 0.2479, label: 2, bag_size: 51554\n",
      "batch 339, loss: 0.0026, instance_loss: 0.0196, weighted_loss: 0.0077, label: 1, bag_size: 78923\n",
      "batch 359, loss: 0.0038, instance_loss: 0.0036, weighted_loss: 0.0037, label: 0, bag_size: 86382\n",
      "batch 379, loss: 0.0207, instance_loss: 0.0003, weighted_loss: 0.0145, label: 1, bag_size: 66357\n",
      "batch 399, loss: 0.1382, instance_loss: 0.0162, weighted_loss: 0.1016, label: 0, bag_size: 53181\n",
      "batch 419, loss: 0.0214, instance_loss: 0.0037, weighted_loss: 0.0161, label: 1, bag_size: 57548\n",
      "batch 439, loss: 0.0649, instance_loss: 0.0008, weighted_loss: 0.0457, label: 0, bag_size: 53188\n",
      "batch 459, loss: 0.0400, instance_loss: 0.0768, weighted_loss: 0.0510, label: 2, bag_size: 76037\n",
      "batch 479, loss: 0.9635, instance_loss: 0.0037, weighted_loss: 0.6755, label: 1, bag_size: 61193\n",
      "batch 499, loss: 0.0275, instance_loss: 0.0057, weighted_loss: 0.0209, label: 2, bag_size: 69080\n",
      "batch 519, loss: 0.0383, instance_loss: 0.0079, weighted_loss: 0.0291, label: 0, bag_size: 60828\n",
      "batch 539, loss: 0.0002, instance_loss: 0.0004, weighted_loss: 0.0002, label: 0, bag_size: 65462\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9981751824817519: correct 4376/4384\n",
      "class 1 clustering acc 0.9979470802919708: correct 4375/4384\n",
      "Epoch: 2, train_loss: 0.2917, train_clustering_loss:  0.0185, train_error: 0.1004\n",
      "class 0: acc 0.8862275449101796, correct 148/167\n",
      "class 1: acc 0.84, correct 147/175\n",
      "class 2: acc 0.9611650485436893, correct 198/206\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3809, val_error: 0.1311, auc: 0.9621\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.8421052631578947, correct 16/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0800, instance_loss: 0.0021, weighted_loss: 0.0566, label: 1, bag_size: 25323\n",
      "batch 39, loss: 0.0013, instance_loss: 0.0037, weighted_loss: 0.0020, label: 0, bag_size: 55538\n",
      "batch 59, loss: 0.0060, instance_loss: 0.0231, weighted_loss: 0.0111, label: 0, bag_size: 9874\n",
      "batch 79, loss: 0.3572, instance_loss: 0.0010, weighted_loss: 0.2503, label: 0, bag_size: 63845\n",
      "batch 99, loss: 0.7911, instance_loss: 0.0161, weighted_loss: 0.5586, label: 0, bag_size: 15052\n",
      "batch 119, loss: 1.3964, instance_loss: 0.0011, weighted_loss: 0.9778, label: 1, bag_size: 112356\n",
      "batch 139, loss: 0.3694, instance_loss: 0.0047, weighted_loss: 0.2600, label: 2, bag_size: 91267\n",
      "batch 159, loss: 0.0064, instance_loss: 0.1427, weighted_loss: 0.0473, label: 1, bag_size: 12098\n",
      "batch 179, loss: 0.0202, instance_loss: 0.0007, weighted_loss: 0.0143, label: 1, bag_size: 45270\n",
      "batch 199, loss: 0.0903, instance_loss: 0.0028, weighted_loss: 0.0640, label: 1, bag_size: 115202\n",
      "batch 219, loss: 0.0116, instance_loss: 0.0012, weighted_loss: 0.0085, label: 0, bag_size: 61317\n",
      "batch 239, loss: 0.1735, instance_loss: 0.0017, weighted_loss: 0.1219, label: 0, bag_size: 70854\n",
      "batch 259, loss: 0.0322, instance_loss: 0.0022, weighted_loss: 0.0232, label: 2, bag_size: 54040\n",
      "batch 279, loss: 0.0655, instance_loss: 0.0158, weighted_loss: 0.0506, label: 1, bag_size: 12022\n",
      "batch 299, loss: 0.0045, instance_loss: 0.0000, weighted_loss: 0.0032, label: 0, bag_size: 61195\n",
      "batch 319, loss: 0.0031, instance_loss: 0.0012, weighted_loss: 0.0026, label: 0, bag_size: 48589\n",
      "batch 339, loss: 0.0737, instance_loss: 0.0077, weighted_loss: 0.0539, label: 2, bag_size: 51316\n",
      "batch 359, loss: 0.3265, instance_loss: 0.0033, weighted_loss: 0.2295, label: 0, bag_size: 58702\n",
      "batch 379, loss: 0.0748, instance_loss: 0.0350, weighted_loss: 0.0629, label: 1, bag_size: 44591\n",
      "batch 399, loss: 0.0140, instance_loss: 0.0121, weighted_loss: 0.0135, label: 0, bag_size: 16213\n",
      "batch 419, loss: 0.0027, instance_loss: 0.0581, weighted_loss: 0.0193, label: 0, bag_size: 13674\n",
      "batch 439, loss: 2.4939, instance_loss: 0.0206, weighted_loss: 1.7520, label: 0, bag_size: 16783\n",
      "batch 459, loss: 0.0132, instance_loss: 0.0023, weighted_loss: 0.0099, label: 1, bag_size: 10935\n",
      "batch 479, loss: 1.4270, instance_loss: 0.0037, weighted_loss: 1.0000, label: 1, bag_size: 5002\n",
      "batch 499, loss: 0.0010, instance_loss: 0.0000, weighted_loss: 0.0007, label: 1, bag_size: 55270\n",
      "batch 519, loss: 0.2109, instance_loss: 0.0075, weighted_loss: 0.1499, label: 1, bag_size: 27666\n",
      "batch 539, loss: 0.1810, instance_loss: 0.0000, weighted_loss: 0.1267, label: 2, bag_size: 59224\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9988594890510949: correct 4379/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 3, train_loss: 0.2281, train_clustering_loss:  0.0116, train_error: 0.0821\n",
      "class 0: acc 0.9080459770114943, correct 158/174\n",
      "class 1: acc 0.8688524590163934, correct 159/183\n",
      "class 2: acc 0.9738219895287958, correct 186/191\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4375, val_error: 0.1803, auc: 0.9610\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.631578947368421, correct 12/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1243, instance_loss: 0.0000, weighted_loss: 0.0870, label: 1, bag_size: 108492\n",
      "batch 39, loss: 0.0098, instance_loss: 0.0010, weighted_loss: 0.0072, label: 2, bag_size: 40315\n",
      "batch 59, loss: 0.4031, instance_loss: 0.0446, weighted_loss: 0.2955, label: 1, bag_size: 54398\n",
      "batch 79, loss: 0.0092, instance_loss: 0.0017, weighted_loss: 0.0069, label: 2, bag_size: 49458\n",
      "batch 99, loss: 0.1055, instance_loss: 0.0016, weighted_loss: 0.0744, label: 0, bag_size: 66469\n",
      "batch 119, loss: 0.4557, instance_loss: 0.0024, weighted_loss: 0.3198, label: 2, bag_size: 71567\n",
      "batch 139, loss: 2.9914, instance_loss: 0.0030, weighted_loss: 2.0949, label: 1, bag_size: 96719\n",
      "batch 159, loss: 0.0101, instance_loss: 0.0004, weighted_loss: 0.0072, label: 0, bag_size: 33045\n",
      "batch 179, loss: 0.1612, instance_loss: 0.0028, weighted_loss: 0.1137, label: 2, bag_size: 51554\n",
      "batch 199, loss: 0.0312, instance_loss: 0.0219, weighted_loss: 0.0284, label: 2, bag_size: 1329\n",
      "batch 219, loss: 0.0228, instance_loss: 0.0035, weighted_loss: 0.0170, label: 0, bag_size: 114320\n",
      "batch 239, loss: 0.0752, instance_loss: 0.0045, weighted_loss: 0.0540, label: 2, bag_size: 51554\n",
      "batch 259, loss: 0.0705, instance_loss: 0.0014, weighted_loss: 0.0498, label: 0, bag_size: 38922\n",
      "batch 279, loss: 0.5593, instance_loss: 0.0024, weighted_loss: 0.3922, label: 2, bag_size: 52924\n",
      "batch 299, loss: 0.0540, instance_loss: 0.0004, weighted_loss: 0.0379, label: 2, bag_size: 103823\n",
      "batch 319, loss: 1.0620, instance_loss: 0.0055, weighted_loss: 0.7451, label: 1, bag_size: 22039\n",
      "batch 339, loss: 0.0064, instance_loss: 0.0032, weighted_loss: 0.0054, label: 2, bag_size: 73189\n",
      "batch 359, loss: 0.5536, instance_loss: 0.0013, weighted_loss: 0.3879, label: 0, bag_size: 90911\n",
      "batch 379, loss: 0.0199, instance_loss: 0.0019, weighted_loss: 0.0145, label: 2, bag_size: 49458\n",
      "batch 399, loss: 0.0024, instance_loss: 0.0150, weighted_loss: 0.0062, label: 1, bag_size: 66357\n",
      "batch 419, loss: 0.1563, instance_loss: 0.0200, weighted_loss: 0.1154, label: 2, bag_size: 5173\n",
      "batch 439, loss: 0.0024, instance_loss: 0.0022, weighted_loss: 0.0023, label: 0, bag_size: 125590\n",
      "batch 459, loss: 0.0007, instance_loss: 0.0020, weighted_loss: 0.0011, label: 0, bag_size: 73829\n",
      "batch 479, loss: 0.1385, instance_loss: 0.0024, weighted_loss: 0.0977, label: 0, bag_size: 53043\n",
      "batch 499, loss: 0.0222, instance_loss: 0.0108, weighted_loss: 0.0188, label: 1, bag_size: 54398\n",
      "batch 519, loss: 0.0270, instance_loss: 0.0031, weighted_loss: 0.0198, label: 1, bag_size: 44591\n",
      "batch 539, loss: 0.0460, instance_loss: 0.0000, weighted_loss: 0.0322, label: 1, bag_size: 108492\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 4, train_loss: 0.2259, train_clustering_loss:  0.0093, train_error: 0.0748\n",
      "class 0: acc 0.8936170212765957, correct 168/188\n",
      "class 1: acc 0.9017341040462428, correct 156/173\n",
      "class 2: acc 0.9786096256684492, correct 183/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4122, val_error: 0.1475, auc: 0.9729\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7894736842105263, correct 15/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "Validation loss decreased (0.964002 --> 0.972890).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0051, instance_loss: 0.0005, weighted_loss: 0.0037, label: 1, bag_size: 42941\n",
      "batch 39, loss: 0.0021, instance_loss: 0.0002, weighted_loss: 0.0015, label: 2, bag_size: 38471\n",
      "batch 59, loss: 0.1083, instance_loss: 0.0009, weighted_loss: 0.0761, label: 2, bag_size: 52924\n",
      "batch 79, loss: 0.0220, instance_loss: 0.0012, weighted_loss: 0.0157, label: 2, bag_size: 69080\n",
      "batch 99, loss: 0.0753, instance_loss: 0.0017, weighted_loss: 0.0532, label: 1, bag_size: 46625\n",
      "batch 119, loss: 0.0056, instance_loss: 0.0203, weighted_loss: 0.0100, label: 0, bag_size: 67598\n",
      "batch 139, loss: 0.0072, instance_loss: 0.0002, weighted_loss: 0.0051, label: 2, bag_size: 72686\n",
      "batch 159, loss: 0.2303, instance_loss: 0.0025, weighted_loss: 0.1619, label: 0, bag_size: 52261\n",
      "batch 179, loss: 0.0081, instance_loss: 0.0019, weighted_loss: 0.0062, label: 1, bag_size: 53245\n",
      "batch 199, loss: 0.7944, instance_loss: 0.0924, weighted_loss: 0.5838, label: 0, bag_size: 30355\n",
      "batch 219, loss: 0.0421, instance_loss: 0.0092, weighted_loss: 0.0323, label: 2, bag_size: 69080\n",
      "batch 239, loss: 0.4720, instance_loss: 0.0036, weighted_loss: 0.3315, label: 1, bag_size: 22039\n",
      "batch 259, loss: 0.0416, instance_loss: 0.0020, weighted_loss: 0.0297, label: 1, bag_size: 91901\n",
      "batch 279, loss: 0.0021, instance_loss: 0.0022, weighted_loss: 0.0021, label: 0, bag_size: 13926\n",
      "batch 299, loss: 0.0001, instance_loss: 0.0002, weighted_loss: 0.0001, label: 0, bag_size: 69160\n",
      "batch 319, loss: 0.0533, instance_loss: 0.0221, weighted_loss: 0.0439, label: 2, bag_size: 11415\n",
      "batch 339, loss: 0.2348, instance_loss: 0.0062, weighted_loss: 0.1662, label: 2, bag_size: 10243\n",
      "batch 359, loss: 4.7301, instance_loss: 0.0083, weighted_loss: 3.3136, label: 0, bag_size: 59783\n",
      "batch 379, loss: 0.0057, instance_loss: 0.0080, weighted_loss: 0.0064, label: 1, bag_size: 44760\n",
      "batch 399, loss: 0.0196, instance_loss: 0.0008, weighted_loss: 0.0139, label: 0, bag_size: 67742\n",
      "batch 419, loss: 0.0533, instance_loss: 0.0036, weighted_loss: 0.0384, label: 2, bag_size: 63921\n",
      "batch 439, loss: 0.0001, instance_loss: 0.0016, weighted_loss: 0.0005, label: 1, bag_size: 28713\n",
      "batch 459, loss: 0.0006, instance_loss: 0.0005, weighted_loss: 0.0006, label: 0, bag_size: 71332\n",
      "batch 479, loss: 0.0034, instance_loss: 0.0065, weighted_loss: 0.0043, label: 2, bag_size: 69080\n",
      "batch 499, loss: 0.0040, instance_loss: 0.0026, weighted_loss: 0.0036, label: 2, bag_size: 51316\n",
      "batch 519, loss: 0.0596, instance_loss: 0.0029, weighted_loss: 0.0426, label: 1, bag_size: 39770\n",
      "batch 539, loss: 0.0078, instance_loss: 0.0030, weighted_loss: 0.0064, label: 1, bag_size: 28349\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 5, train_loss: 0.1980, train_clustering_loss:  0.0067, train_error: 0.0712\n",
      "class 0: acc 0.8941176470588236, correct 152/170\n",
      "class 1: acc 0.9222797927461139, correct 178/193\n",
      "class 2: acc 0.9675675675675676, correct 179/185\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3816, val_error: 0.0984, auc: 0.9643\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.9473684210526315, correct 18/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0701, instance_loss: 0.0032, weighted_loss: 0.0500, label: 0, bag_size: 51831\n",
      "batch 39, loss: 0.0907, instance_loss: 0.0061, weighted_loss: 0.0653, label: 0, bag_size: 67027\n",
      "batch 59, loss: 0.0150, instance_loss: 0.0107, weighted_loss: 0.0137, label: 2, bag_size: 39234\n",
      "batch 79, loss: 0.0012, instance_loss: 0.0002, weighted_loss: 0.0009, label: 0, bag_size: 67834\n",
      "batch 99, loss: 0.1032, instance_loss: 0.0167, weighted_loss: 0.0772, label: 0, bag_size: 35391\n",
      "batch 119, loss: 0.0079, instance_loss: 0.0031, weighted_loss: 0.0064, label: 2, bag_size: 36673\n",
      "batch 139, loss: 0.0518, instance_loss: 0.0014, weighted_loss: 0.0367, label: 2, bag_size: 46661\n",
      "batch 159, loss: 0.0013, instance_loss: 0.0044, weighted_loss: 0.0023, label: 0, bag_size: 78549\n",
      "batch 179, loss: 0.0080, instance_loss: 0.0029, weighted_loss: 0.0064, label: 1, bag_size: 7199\n",
      "batch 199, loss: 0.0016, instance_loss: 0.0052, weighted_loss: 0.0027, label: 2, bag_size: 51251\n",
      "batch 219, loss: 0.0023, instance_loss: 0.0036, weighted_loss: 0.0027, label: 2, bag_size: 36978\n",
      "batch 239, loss: 0.0015, instance_loss: 0.0005, weighted_loss: 0.0012, label: 0, bag_size: 92719\n",
      "batch 259, loss: 0.0063, instance_loss: 0.0005, weighted_loss: 0.0045, label: 0, bag_size: 61317\n",
      "batch 279, loss: 2.4857, instance_loss: 0.0003, weighted_loss: 1.7401, label: 1, bag_size: 92502\n",
      "batch 299, loss: 0.0774, instance_loss: 0.0010, weighted_loss: 0.0545, label: 1, bag_size: 62303\n",
      "batch 319, loss: 0.3114, instance_loss: 0.0092, weighted_loss: 0.2207, label: 1, bag_size: 87674\n",
      "batch 339, loss: 0.0162, instance_loss: 0.0017, weighted_loss: 0.0118, label: 0, bag_size: 82955\n",
      "batch 359, loss: 0.0115, instance_loss: 0.0011, weighted_loss: 0.0084, label: 1, bag_size: 49463\n",
      "batch 379, loss: 0.2056, instance_loss: 0.0297, weighted_loss: 0.1528, label: 2, bag_size: 16282\n",
      "batch 399, loss: 0.0125, instance_loss: 0.0047, weighted_loss: 0.0102, label: 2, bag_size: 66801\n",
      "batch 419, loss: 0.6776, instance_loss: 0.0021, weighted_loss: 0.4749, label: 2, bag_size: 50246\n",
      "batch 439, loss: 0.0020, instance_loss: 0.0004, weighted_loss: 0.0015, label: 0, bag_size: 85637\n",
      "batch 459, loss: 0.0036, instance_loss: 0.0065, weighted_loss: 0.0045, label: 2, bag_size: 76037\n",
      "batch 479, loss: 0.0060, instance_loss: 0.0010, weighted_loss: 0.0045, label: 0, bag_size: 59200\n",
      "batch 499, loss: 0.3540, instance_loss: 0.0110, weighted_loss: 0.2511, label: 1, bag_size: 86619\n",
      "batch 519, loss: 0.0101, instance_loss: 0.0060, weighted_loss: 0.0089, label: 2, bag_size: 51251\n",
      "batch 539, loss: 0.0085, instance_loss: 0.0011, weighted_loss: 0.0063, label: 2, bag_size: 49458\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 6, train_loss: 0.2093, train_clustering_loss:  0.0055, train_error: 0.0712\n",
      "class 0: acc 0.917098445595855, correct 177/193\n",
      "class 1: acc 0.9116022099447514, correct 165/181\n",
      "class 2: acc 0.9597701149425287, correct 167/174\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3657, val_error: 0.1475, auc: 0.9742\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7894736842105263, correct 15/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "Validation loss decreased (0.972890 --> 0.974158).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0106, instance_loss: 0.0129, weighted_loss: 0.0113, label: 1, bag_size: 8990\n",
      "batch 39, loss: 0.1109, instance_loss: 0.0030, weighted_loss: 0.0785, label: 2, bag_size: 87135\n",
      "batch 59, loss: 0.0006, instance_loss: 0.0118, weighted_loss: 0.0039, label: 0, bag_size: 20910\n",
      "batch 79, loss: 0.0078, instance_loss: 0.0057, weighted_loss: 0.0072, label: 1, bag_size: 15902\n",
      "batch 99, loss: 0.1575, instance_loss: 0.0095, weighted_loss: 0.1131, label: 2, bag_size: 59810\n",
      "batch 119, loss: 0.0005, instance_loss: 0.0000, weighted_loss: 0.0003, label: 0, bag_size: 35639\n",
      "batch 139, loss: 0.0011, instance_loss: 0.0019, weighted_loss: 0.0013, label: 0, bag_size: 80173\n",
      "batch 159, loss: 0.0385, instance_loss: 0.0300, weighted_loss: 0.0360, label: 2, bag_size: 16676\n",
      "batch 179, loss: 0.0002, instance_loss: 0.0029, weighted_loss: 0.0010, label: 0, bag_size: 103177\n",
      "batch 199, loss: 0.3770, instance_loss: 0.0085, weighted_loss: 0.2664, label: 2, bag_size: 13963\n",
      "batch 219, loss: 0.0018, instance_loss: 0.0006, weighted_loss: 0.0014, label: 0, bag_size: 64143\n",
      "batch 239, loss: 1.4045, instance_loss: 0.0012, weighted_loss: 0.9835, label: 1, bag_size: 18108\n",
      "batch 259, loss: 0.0159, instance_loss: 0.0006, weighted_loss: 0.0113, label: 0, bag_size: 44730\n",
      "batch 279, loss: 1.0469, instance_loss: 0.0090, weighted_loss: 0.7355, label: 0, bag_size: 80619\n",
      "batch 299, loss: 0.0714, instance_loss: 0.0405, weighted_loss: 0.0621, label: 0, bag_size: 82655\n",
      "batch 319, loss: 0.0005, instance_loss: 0.0000, weighted_loss: 0.0004, label: 0, bag_size: 72078\n",
      "batch 339, loss: 0.0002, instance_loss: 0.0008, weighted_loss: 0.0004, label: 0, bag_size: 69154\n",
      "batch 359, loss: 1.2089, instance_loss: 0.0007, weighted_loss: 0.8465, label: 2, bag_size: 52924\n",
      "batch 379, loss: 0.0003, instance_loss: 0.0060, weighted_loss: 0.0020, label: 2, bag_size: 73189\n",
      "batch 399, loss: 0.0010, instance_loss: 0.0055, weighted_loss: 0.0023, label: 2, bag_size: 51251\n",
      "batch 419, loss: 0.0086, instance_loss: 0.0041, weighted_loss: 0.0072, label: 1, bag_size: 10520\n",
      "batch 439, loss: 0.0763, instance_loss: 0.0019, weighted_loss: 0.0540, label: 1, bag_size: 108492\n",
      "batch 459, loss: 0.4305, instance_loss: 0.0000, weighted_loss: 0.3014, label: 1, bag_size: 49548\n",
      "batch 479, loss: 0.0348, instance_loss: 0.0031, weighted_loss: 0.0253, label: 2, bag_size: 39508\n",
      "batch 499, loss: 0.0014, instance_loss: 0.0013, weighted_loss: 0.0014, label: 2, bag_size: 36673\n",
      "batch 519, loss: 0.0014, instance_loss: 0.0194, weighted_loss: 0.0068, label: 2, bag_size: 36978\n",
      "batch 539, loss: 0.0460, instance_loss: 0.0036, weighted_loss: 0.0333, label: 1, bag_size: 32072\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 7, train_loss: 0.1625, train_clustering_loss:  0.0062, train_error: 0.0639\n",
      "class 0: acc 0.9055555555555556, correct 163/180\n",
      "class 1: acc 0.9147727272727273, correct 161/176\n",
      "class 2: acc 0.984375, correct 189/192\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4541, val_error: 0.1475, auc: 0.9633\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.7894736842105263, correct 15/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0060, instance_loss: 0.0031, weighted_loss: 0.0051, label: 0, bag_size: 9874\n",
      "batch 39, loss: 0.6210, instance_loss: 0.0012, weighted_loss: 0.4350, label: 2, bag_size: 52584\n",
      "batch 59, loss: 0.1239, instance_loss: 0.0033, weighted_loss: 0.0877, label: 0, bag_size: 72021\n",
      "batch 79, loss: 0.0681, instance_loss: 0.0042, weighted_loss: 0.0489, label: 0, bag_size: 58349\n",
      "batch 99, loss: 0.0008, instance_loss: 0.0008, weighted_loss: 0.0008, label: 2, bag_size: 69080\n",
      "batch 119, loss: 0.5901, instance_loss: 0.0024, weighted_loss: 0.4138, label: 2, bag_size: 19057\n",
      "batch 139, loss: 0.0008, instance_loss: 0.0036, weighted_loss: 0.0016, label: 2, bag_size: 51017\n",
      "batch 159, loss: 0.2344, instance_loss: 0.0042, weighted_loss: 0.1653, label: 2, bag_size: 47138\n",
      "batch 179, loss: 0.0144, instance_loss: 0.0009, weighted_loss: 0.0103, label: 0, bag_size: 82955\n",
      "batch 199, loss: 0.6787, instance_loss: 0.0028, weighted_loss: 0.4759, label: 0, bag_size: 74863\n",
      "batch 219, loss: 0.4477, instance_loss: 0.0143, weighted_loss: 0.3176, label: 0, bag_size: 16184\n",
      "batch 239, loss: 0.2123, instance_loss: 0.0118, weighted_loss: 0.1522, label: 1, bag_size: 39620\n",
      "batch 259, loss: 0.0027, instance_loss: 0.0076, weighted_loss: 0.0042, label: 1, bag_size: 57548\n",
      "batch 279, loss: 0.2109, instance_loss: 0.0080, weighted_loss: 0.1500, label: 2, bag_size: 39234\n",
      "batch 299, loss: 0.0157, instance_loss: 0.0092, weighted_loss: 0.0138, label: 0, bag_size: 88181\n",
      "batch 319, loss: 0.0004, instance_loss: 0.0023, weighted_loss: 0.0010, label: 1, bag_size: 55270\n",
      "batch 339, loss: 0.0995, instance_loss: 0.0000, weighted_loss: 0.0697, label: 1, bag_size: 49548\n",
      "batch 359, loss: 0.0004, instance_loss: 0.0096, weighted_loss: 0.0031, label: 0, bag_size: 9374\n",
      "batch 379, loss: 0.0029, instance_loss: 0.0013, weighted_loss: 0.0024, label: 1, bag_size: 19703\n",
      "batch 399, loss: 0.2003, instance_loss: 0.0030, weighted_loss: 0.1411, label: 1, bag_size: 102214\n",
      "batch 419, loss: 0.0032, instance_loss: 0.0000, weighted_loss: 0.0023, label: 1, bag_size: 55668\n",
      "batch 439, loss: 0.1608, instance_loss: 0.0065, weighted_loss: 0.1145, label: 2, bag_size: 52584\n",
      "batch 459, loss: 0.0003, instance_loss: 0.0008, weighted_loss: 0.0004, label: 0, bag_size: 77275\n",
      "batch 479, loss: 0.0553, instance_loss: 0.0015, weighted_loss: 0.0392, label: 2, bag_size: 64488\n",
      "batch 499, loss: 0.0007, instance_loss: 0.0015, weighted_loss: 0.0010, label: 0, bag_size: 77275\n",
      "batch 519, loss: 0.0113, instance_loss: 0.0069, weighted_loss: 0.0100, label: 1, bag_size: 2328\n",
      "batch 539, loss: 0.0022, instance_loss: 0.0007, weighted_loss: 0.0018, label: 0, bag_size: 48909\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 8, train_loss: 0.1522, train_clustering_loss:  0.0085, train_error: 0.0511\n",
      "class 0: acc 0.93048128342246, correct 174/187\n",
      "class 1: acc 0.9494949494949495, correct 188/198\n",
      "class 2: acc 0.9693251533742331, correct 158/163\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4506, val_error: 0.1803, auc: 0.9714\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.631578947368421, correct 12/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0001, instance_loss: 0.0222, weighted_loss: 0.0067, label: 0, bag_size: 41676\n",
      "batch 39, loss: 0.0052, instance_loss: 0.0002, weighted_loss: 0.0037, label: 0, bag_size: 57545\n",
      "batch 59, loss: 0.0022, instance_loss: 0.0000, weighted_loss: 0.0016, label: 0, bag_size: 35391\n",
      "batch 79, loss: 0.0705, instance_loss: 0.0041, weighted_loss: 0.0506, label: 1, bag_size: 49445\n",
      "batch 99, loss: 0.5071, instance_loss: 0.0027, weighted_loss: 0.3558, label: 2, bag_size: 51554\n",
      "batch 119, loss: 0.0010, instance_loss: 0.0057, weighted_loss: 0.0024, label: 2, bag_size: 84436\n",
      "batch 139, loss: 0.0032, instance_loss: 0.0036, weighted_loss: 0.0033, label: 2, bag_size: 51017\n",
      "batch 159, loss: 0.0018, instance_loss: 0.0049, weighted_loss: 0.0027, label: 0, bag_size: 14481\n",
      "batch 179, loss: 0.0245, instance_loss: 0.0123, weighted_loss: 0.0208, label: 1, bag_size: 62126\n",
      "batch 199, loss: 0.0009, instance_loss: 0.0032, weighted_loss: 0.0016, label: 2, bag_size: 76037\n",
      "batch 219, loss: 0.0020, instance_loss: 0.0082, weighted_loss: 0.0039, label: 1, bag_size: 7667\n",
      "batch 239, loss: 0.2457, instance_loss: 0.0014, weighted_loss: 0.1724, label: 1, bag_size: 25323\n",
      "batch 259, loss: 0.0223, instance_loss: 0.0014, weighted_loss: 0.0160, label: 0, bag_size: 58267\n",
      "batch 279, loss: 0.0432, instance_loss: 0.0008, weighted_loss: 0.0305, label: 2, bag_size: 28252\n",
      "batch 299, loss: 0.0055, instance_loss: 0.0022, weighted_loss: 0.0045, label: 1, bag_size: 19703\n",
      "batch 319, loss: 0.0536, instance_loss: 0.0011, weighted_loss: 0.0378, label: 0, bag_size: 59072\n",
      "batch 339, loss: 0.0040, instance_loss: 0.0031, weighted_loss: 0.0037, label: 2, bag_size: 66345\n",
      "batch 359, loss: 0.0003, instance_loss: 0.0018, weighted_loss: 0.0008, label: 0, bag_size: 53568\n",
      "batch 379, loss: 0.0792, instance_loss: 0.0013, weighted_loss: 0.0559, label: 2, bag_size: 91267\n",
      "batch 399, loss: 0.9529, instance_loss: 0.0110, weighted_loss: 0.6704, label: 1, bag_size: 56549\n",
      "batch 419, loss: 0.0852, instance_loss: 0.0079, weighted_loss: 0.0620, label: 1, bag_size: 7667\n",
      "batch 439, loss: 0.0031, instance_loss: 0.0026, weighted_loss: 0.0030, label: 1, bag_size: 10935\n",
      "batch 459, loss: 0.0045, instance_loss: 0.0014, weighted_loss: 0.0035, label: 2, bag_size: 54040\n",
      "batch 479, loss: 0.0003, instance_loss: 0.0004, weighted_loss: 0.0003, label: 1, bag_size: 45270\n",
      "batch 499, loss: 0.1497, instance_loss: 0.0067, weighted_loss: 0.1068, label: 0, bag_size: 7780\n",
      "batch 519, loss: 0.0018, instance_loss: 0.0016, weighted_loss: 0.0018, label: 0, bag_size: 125590\n",
      "batch 539, loss: 0.0012, instance_loss: 0.0032, weighted_loss: 0.0018, label: 0, bag_size: 68370\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 9, train_loss: 0.1467, train_clustering_loss:  0.0046, train_error: 0.0438\n",
      "class 0: acc 0.9417989417989417, correct 178/189\n",
      "class 1: acc 0.9411764705882353, correct 176/187\n",
      "class 2: acc 0.9883720930232558, correct 170/172\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5543, val_error: 0.1803, auc: 0.9662\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.631578947368421, correct 12/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 3 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0343, instance_loss: 0.0008, weighted_loss: 0.0242, label: 1, bag_size: 50027\n",
      "batch 39, loss: 0.0092, instance_loss: 0.0104, weighted_loss: 0.0096, label: 1, bag_size: 7532\n",
      "batch 59, loss: 0.7658, instance_loss: 0.0004, weighted_loss: 0.5362, label: 2, bag_size: 51554\n",
      "batch 79, loss: 0.0044, instance_loss: 0.0030, weighted_loss: 0.0040, label: 1, bag_size: 81168\n",
      "batch 99, loss: 0.0058, instance_loss: 0.0033, weighted_loss: 0.0051, label: 2, bag_size: 11415\n",
      "batch 119, loss: 0.0010, instance_loss: 0.0022, weighted_loss: 0.0013, label: 2, bag_size: 73189\n",
      "batch 139, loss: 0.0040, instance_loss: 0.0007, weighted_loss: 0.0030, label: 0, bag_size: 72752\n",
      "batch 159, loss: 0.0049, instance_loss: 0.0038, weighted_loss: 0.0046, label: 1, bag_size: 7667\n",
      "batch 179, loss: 0.0293, instance_loss: 0.0007, weighted_loss: 0.0207, label: 2, bag_size: 87135\n",
      "batch 199, loss: 0.8223, instance_loss: 0.0056, weighted_loss: 0.5772, label: 1, bag_size: 39620\n",
      "batch 219, loss: 0.0005, instance_loss: 0.0072, weighted_loss: 0.0025, label: 0, bag_size: 31339\n",
      "batch 239, loss: 0.0586, instance_loss: 0.0009, weighted_loss: 0.0413, label: 2, bag_size: 87135\n",
      "batch 259, loss: 1.1818, instance_loss: 0.0011, weighted_loss: 0.8276, label: 1, bag_size: 112356\n",
      "batch 279, loss: 0.0008, instance_loss: 0.0011, weighted_loss: 0.0009, label: 2, bag_size: 69002\n",
      "batch 299, loss: 0.0181, instance_loss: 0.0025, weighted_loss: 0.0134, label: 2, bag_size: 49458\n",
      "batch 319, loss: 0.3104, instance_loss: 0.0067, weighted_loss: 0.2193, label: 0, bag_size: 16184\n",
      "batch 339, loss: 0.0099, instance_loss: 0.0007, weighted_loss: 0.0072, label: 1, bag_size: 56317\n",
      "batch 359, loss: 0.2484, instance_loss: 0.0013, weighted_loss: 0.1743, label: 1, bag_size: 36784\n",
      "batch 379, loss: 0.0333, instance_loss: 0.0016, weighted_loss: 0.0238, label: 1, bag_size: 81168\n",
      "batch 399, loss: 0.0061, instance_loss: 0.0024, weighted_loss: 0.0050, label: 1, bag_size: 16596\n",
      "batch 419, loss: 0.0207, instance_loss: 0.0017, weighted_loss: 0.0150, label: 2, bag_size: 47138\n",
      "batch 439, loss: 0.0429, instance_loss: 0.0072, weighted_loss: 0.0321, label: 0, bag_size: 68286\n",
      "batch 459, loss: 0.0009, instance_loss: 0.0080, weighted_loss: 0.0031, label: 0, bag_size: 11462\n",
      "batch 479, loss: 0.0392, instance_loss: 0.0000, weighted_loss: 0.0275, label: 1, bag_size: 105100\n",
      "batch 499, loss: 0.0014, instance_loss: 0.0006, weighted_loss: 0.0012, label: 0, bag_size: 94161\n",
      "batch 519, loss: 0.0019, instance_loss: 0.0005, weighted_loss: 0.0015, label: 0, bag_size: 87072\n",
      "batch 539, loss: 0.0940, instance_loss: 0.0004, weighted_loss: 0.0659, label: 0, bag_size: 80927\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 10, train_loss: 0.1479, train_clustering_loss:  0.0050, train_error: 0.0529\n",
      "class 0: acc 0.9364161849710982, correct 162/173\n",
      "class 1: acc 0.9210526315789473, correct 175/190\n",
      "class 2: acc 0.9837837837837838, correct 182/185\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4840, val_error: 0.1639, auc: 0.9730\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8918918918918919, correct 33/37\n",
      "class 1: acc 0.6842105263157895, correct 13/19\n",
      "class 2: acc 1.0, correct 5/5\n",
      "EarlyStopping counter: 4 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0006, instance_loss: 0.0019, weighted_loss: 0.0010, label: 2, bag_size: 38471\n",
      "batch 39, loss: 0.0120, instance_loss: 0.0119, weighted_loss: 0.0120, label: 1, bag_size: 8990\n",
      "batch 59, loss: 0.0517, instance_loss: 0.0058, weighted_loss: 0.0379, label: 2, bag_size: 5173\n",
      "batch 79, loss: 0.0074, instance_loss: 0.0005, weighted_loss: 0.0054, label: 1, bag_size: 54288\n",
      "batch 99, loss: 0.0011, instance_loss: 0.0052, weighted_loss: 0.0023, label: 1, bag_size: 44760\n",
      "batch 119, loss: 0.1279, instance_loss: 0.0086, weighted_loss: 0.0921, label: 0, bag_size: 22685\n",
      "batch 139, loss: 0.0445, instance_loss: 0.0022, weighted_loss: 0.0318, label: 2, bag_size: 58111\n",
      "batch 159, loss: 0.0288, instance_loss: 0.0019, weighted_loss: 0.0207, label: 0, bag_size: 19035\n",
      "batch 179, loss: 0.1451, instance_loss: 0.0028, weighted_loss: 0.1024, label: 1, bag_size: 86027\n",
      "batch 199, loss: 0.0038, instance_loss: 0.0004, weighted_loss: 0.0027, label: 1, bag_size: 50525\n",
      "batch 219, loss: 0.0015, instance_loss: 0.0024, weighted_loss: 0.0017, label: 0, bag_size: 23506\n",
      "batch 239, loss: 0.0002, instance_loss: 0.0018, weighted_loss: 0.0007, label: 0, bag_size: 45490\n",
      "batch 259, loss: 0.0060, instance_loss: 0.0021, weighted_loss: 0.0048, label: 2, bag_size: 30615\n",
      "batch 279, loss: 0.0727, instance_loss: 0.0171, weighted_loss: 0.0560, label: 2, bag_size: 34408\n",
      "batch 299, loss: 0.0001, instance_loss: 0.0069, weighted_loss: 0.0021, label: 2, bag_size: 73189\n",
      "batch 319, loss: 0.0661, instance_loss: 0.2013, weighted_loss: 0.1067, label: 2, bag_size: 1329\n",
      "batch 339, loss: 0.0029, instance_loss: 0.0031, weighted_loss: 0.0030, label: 1, bag_size: 100810\n",
      "batch 359, loss: 0.0003, instance_loss: 0.0007, weighted_loss: 0.0004, label: 0, bag_size: 23753\n",
      "batch 379, loss: 0.0797, instance_loss: 0.0021, weighted_loss: 0.0565, label: 0, bag_size: 45910\n",
      "batch 399, loss: 0.0151, instance_loss: 0.0019, weighted_loss: 0.0112, label: 2, bag_size: 66801\n",
      "batch 419, loss: 2.4981, instance_loss: 0.0069, weighted_loss: 1.7507, label: 0, bag_size: 16783\n",
      "batch 439, loss: 0.0052, instance_loss: 0.0030, weighted_loss: 0.0046, label: 0, bag_size: 61984\n",
      "batch 459, loss: 0.0812, instance_loss: 0.0022, weighted_loss: 0.0575, label: 0, bag_size: 104395\n",
      "batch 479, loss: 0.0084, instance_loss: 0.0057, weighted_loss: 0.0076, label: 0, bag_size: 9867\n",
      "batch 499, loss: 0.0027, instance_loss: 0.0029, weighted_loss: 0.0027, label: 0, bag_size: 114320\n",
      "batch 519, loss: 0.0187, instance_loss: 0.0029, weighted_loss: 0.0139, label: 2, bag_size: 69080\n",
      "batch 539, loss: 0.1423, instance_loss: 0.0062, weighted_loss: 0.1015, label: 0, bag_size: 10082\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 11, train_loss: 0.1515, train_clustering_loss:  0.0057, train_error: 0.0529\n",
      "class 0: acc 0.9162303664921466, correct 175/191\n",
      "class 1: acc 0.9365079365079365, correct 177/189\n",
      "class 2: acc 0.9940476190476191, correct 167/168\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3999, val_error: 0.1475, auc: 0.9715\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7368421052631579, correct 14/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 5 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0009, instance_loss: 0.0048, weighted_loss: 0.0021, label: 2, bag_size: 30615\n",
      "batch 39, loss: 0.0060, instance_loss: 0.0028, weighted_loss: 0.0050, label: 1, bag_size: 113186\n",
      "batch 59, loss: 0.0066, instance_loss: 0.0057, weighted_loss: 0.0063, label: 1, bag_size: 12138\n",
      "batch 79, loss: 0.0032, instance_loss: 0.0074, weighted_loss: 0.0044, label: 0, bag_size: 16262\n",
      "batch 99, loss: 0.0269, instance_loss: 0.0010, weighted_loss: 0.0191, label: 0, bag_size: 70596\n",
      "batch 119, loss: 0.1662, instance_loss: 0.0037, weighted_loss: 0.1174, label: 0, bag_size: 43883\n",
      "batch 139, loss: 0.1195, instance_loss: 0.0183, weighted_loss: 0.0891, label: 2, bag_size: 16282\n",
      "batch 159, loss: 0.0078, instance_loss: 0.0006, weighted_loss: 0.0056, label: 0, bag_size: 78345\n",
      "batch 179, loss: 0.0005, instance_loss: 0.0065, weighted_loss: 0.0023, label: 2, bag_size: 38471\n",
      "batch 199, loss: 0.1860, instance_loss: 0.0061, weighted_loss: 0.1320, label: 2, bag_size: 82484\n",
      "batch 219, loss: 0.0005, instance_loss: 0.0027, weighted_loss: 0.0012, label: 2, bag_size: 51251\n",
      "batch 239, loss: 0.0000, instance_loss: 0.0022, weighted_loss: 0.0007, label: 0, bag_size: 9374\n",
      "batch 259, loss: 0.0008, instance_loss: 0.0029, weighted_loss: 0.0014, label: 1, bag_size: 7199\n",
      "batch 279, loss: 0.0001, instance_loss: 0.0008, weighted_loss: 0.0003, label: 0, bag_size: 65462\n",
      "batch 299, loss: 0.0001, instance_loss: 0.0005, weighted_loss: 0.0002, label: 0, bag_size: 69160\n",
      "batch 319, loss: 0.9062, instance_loss: 0.0016, weighted_loss: 0.6348, label: 1, bag_size: 92502\n",
      "batch 339, loss: 0.0085, instance_loss: 0.0122, weighted_loss: 0.0096, label: 1, bag_size: 47660\n",
      "batch 359, loss: 0.2708, instance_loss: 0.0026, weighted_loss: 0.1904, label: 1, bag_size: 35189\n",
      "batch 379, loss: 0.0051, instance_loss: 0.0015, weighted_loss: 0.0040, label: 2, bag_size: 69080\n",
      "batch 399, loss: 0.0018, instance_loss: 0.0038, weighted_loss: 0.0024, label: 1, bag_size: 10935\n",
      "batch 419, loss: 0.0641, instance_loss: 0.0020, weighted_loss: 0.0455, label: 2, bag_size: 36673\n",
      "batch 439, loss: 1.0532, instance_loss: 0.0006, weighted_loss: 0.7374, label: 0, bag_size: 90782\n",
      "batch 459, loss: 0.1157, instance_loss: 0.0040, weighted_loss: 0.0822, label: 1, bag_size: 49597\n",
      "batch 479, loss: 0.1301, instance_loss: 0.0068, weighted_loss: 0.0931, label: 1, bag_size: 22039\n",
      "batch 499, loss: 0.0537, instance_loss: 0.0046, weighted_loss: 0.0390, label: 1, bag_size: 54312\n",
      "batch 519, loss: 0.0208, instance_loss: 0.0020, weighted_loss: 0.0152, label: 2, bag_size: 59224\n",
      "batch 539, loss: 0.2348, instance_loss: 0.0019, weighted_loss: 0.1650, label: 0, bag_size: 56229\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 12, train_loss: 0.1594, train_clustering_loss:  0.0046, train_error: 0.0620\n",
      "class 0: acc 0.9211822660098522, correct 187/203\n",
      "class 1: acc 0.9156626506024096, correct 152/166\n",
      "class 2: acc 0.9776536312849162, correct 175/179\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4556, val_error: 0.1639, auc: 0.9705\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9459459459459459, correct 35/37\n",
      "class 1: acc 0.631578947368421, correct 12/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 6 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0074, instance_loss: 0.0052, weighted_loss: 0.0068, label: 1, bag_size: 15902\n",
      "batch 39, loss: 0.0029, instance_loss: 0.0027, weighted_loss: 0.0028, label: 1, bag_size: 25106\n",
      "batch 59, loss: 0.0629, instance_loss: 0.0519, weighted_loss: 0.0596, label: 0, bag_size: 84026\n",
      "batch 79, loss: 0.2948, instance_loss: 0.0018, weighted_loss: 0.2069, label: 2, bag_size: 91267\n",
      "batch 99, loss: 0.0157, instance_loss: 0.0074, weighted_loss: 0.0132, label: 0, bag_size: 30263\n",
      "batch 119, loss: 0.0393, instance_loss: 0.0000, weighted_loss: 0.0275, label: 2, bag_size: 103823\n",
      "batch 139, loss: 0.3171, instance_loss: 0.0044, weighted_loss: 0.2233, label: 2, bag_size: 52584\n",
      "batch 159, loss: 0.0003, instance_loss: 0.0002, weighted_loss: 0.0003, label: 0, bag_size: 92502\n",
      "batch 179, loss: 0.0024, instance_loss: 0.0017, weighted_loss: 0.0022, label: 0, bag_size: 70900\n",
      "batch 199, loss: 0.0176, instance_loss: 0.0018, weighted_loss: 0.0128, label: 2, bag_size: 66023\n",
      "batch 219, loss: 0.0003, instance_loss: 0.0036, weighted_loss: 0.0013, label: 1, bag_size: 55681\n",
      "batch 239, loss: 0.0919, instance_loss: 0.0065, weighted_loss: 0.0662, label: 2, bag_size: 23841\n",
      "batch 259, loss: 0.0230, instance_loss: 0.0108, weighted_loss: 0.0193, label: 0, bag_size: 49951\n",
      "batch 279, loss: 0.0062, instance_loss: 0.0057, weighted_loss: 0.0061, label: 1, bag_size: 66357\n",
      "batch 299, loss: 0.0046, instance_loss: 0.0059, weighted_loss: 0.0050, label: 2, bag_size: 84436\n",
      "batch 319, loss: 0.0127, instance_loss: 0.0082, weighted_loss: 0.0114, label: 1, bag_size: 10520\n",
      "batch 339, loss: 0.0220, instance_loss: 0.0074, weighted_loss: 0.0176, label: 2, bag_size: 59810\n",
      "batch 359, loss: 0.0008, instance_loss: 0.0002, weighted_loss: 0.0006, label: 1, bag_size: 57799\n",
      "batch 379, loss: 0.0177, instance_loss: 0.0125, weighted_loss: 0.0162, label: 2, bag_size: 11415\n",
      "batch 399, loss: 0.1057, instance_loss: 0.0027, weighted_loss: 0.0748, label: 1, bag_size: 108492\n",
      "batch 419, loss: 0.0011, instance_loss: 0.0032, weighted_loss: 0.0017, label: 0, bag_size: 93189\n",
      "batch 439, loss: 0.0019, instance_loss: 0.0007, weighted_loss: 0.0016, label: 0, bag_size: 71332\n",
      "batch 459, loss: 0.0096, instance_loss: 0.0036, weighted_loss: 0.0078, label: 2, bag_size: 75833\n",
      "batch 479, loss: 0.0564, instance_loss: 0.0052, weighted_loss: 0.0410, label: 0, bag_size: 53043\n",
      "batch 499, loss: 0.0002, instance_loss: 0.0003, weighted_loss: 0.0002, label: 0, bag_size: 67625\n",
      "batch 519, loss: 0.0351, instance_loss: 0.0195, weighted_loss: 0.0304, label: 2, bag_size: 16282\n",
      "batch 539, loss: 0.0006, instance_loss: 0.0015, weighted_loss: 0.0008, label: 1, bag_size: 46798\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 13, train_loss: 0.1429, train_clustering_loss:  0.0061, train_error: 0.0383\n",
      "class 0: acc 0.9281767955801105, correct 168/181\n",
      "class 1: acc 0.9653465346534653, correct 195/202\n",
      "class 2: acc 0.9939393939393939, correct 164/165\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3291, val_error: 0.0984, auc: 0.9725\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.8947368421052632, correct 17/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 7 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0092, instance_loss: 0.0012, weighted_loss: 0.0068, label: 0, bag_size: 53188\n",
      "batch 39, loss: 0.5084, instance_loss: 0.0062, weighted_loss: 0.3577, label: 0, bag_size: 50515\n",
      "batch 59, loss: 0.1172, instance_loss: 0.0036, weighted_loss: 0.0831, label: 0, bag_size: 79156\n",
      "batch 79, loss: 0.0002, instance_loss: 0.0025, weighted_loss: 0.0009, label: 2, bag_size: 36673\n",
      "batch 99, loss: 0.0031, instance_loss: 0.0030, weighted_loss: 0.0031, label: 0, bag_size: 47840\n",
      "batch 119, loss: 0.0184, instance_loss: 0.0137, weighted_loss: 0.0170, label: 1, bag_size: 18525\n",
      "batch 139, loss: 0.0106, instance_loss: 0.0018, weighted_loss: 0.0080, label: 0, bag_size: 71326\n",
      "batch 159, loss: 0.1525, instance_loss: 0.0007, weighted_loss: 0.1070, label: 1, bag_size: 32084\n",
      "batch 179, loss: 0.0002, instance_loss: 0.0087, weighted_loss: 0.0028, label: 1, bag_size: 11509\n",
      "batch 199, loss: 0.0663, instance_loss: 0.0036, weighted_loss: 0.0475, label: 2, bag_size: 50246\n",
      "batch 219, loss: 0.0137, instance_loss: 0.0053, weighted_loss: 0.0112, label: 2, bag_size: 65317\n",
      "batch 239, loss: 1.5260, instance_loss: 0.0024, weighted_loss: 1.0689, label: 0, bag_size: 53401\n",
      "batch 259, loss: 0.4182, instance_loss: 0.0005, weighted_loss: 0.2929, label: 2, bag_size: 64675\n",
      "batch 279, loss: 0.0449, instance_loss: 0.0025, weighted_loss: 0.0322, label: 1, bag_size: 78923\n",
      "batch 299, loss: 0.2933, instance_loss: 0.0096, weighted_loss: 0.2082, label: 1, bag_size: 9486\n",
      "batch 319, loss: 0.0056, instance_loss: 0.0031, weighted_loss: 0.0049, label: 1, bag_size: 81168\n",
      "batch 339, loss: 0.0599, instance_loss: 0.0000, weighted_loss: 0.0419, label: 2, bag_size: 64675\n",
      "batch 359, loss: 0.8566, instance_loss: 0.0046, weighted_loss: 0.6010, label: 1, bag_size: 67478\n",
      "batch 379, loss: 0.3079, instance_loss: 0.0048, weighted_loss: 0.2170, label: 0, bag_size: 11284\n",
      "batch 399, loss: 0.0356, instance_loss: 0.0012, weighted_loss: 0.0253, label: 2, bag_size: 59224\n",
      "batch 419, loss: 0.0158, instance_loss: 0.0008, weighted_loss: 0.0113, label: 2, bag_size: 87135\n",
      "batch 439, loss: 0.0073, instance_loss: 0.0016, weighted_loss: 0.0056, label: 2, bag_size: 50978\n",
      "batch 459, loss: 0.0012, instance_loss: 0.0013, weighted_loss: 0.0012, label: 0, bag_size: 62681\n",
      "batch 479, loss: 0.6378, instance_loss: 0.0007, weighted_loss: 0.4466, label: 0, bag_size: 78560\n",
      "batch 499, loss: 0.1199, instance_loss: 0.0002, weighted_loss: 0.0840, label: 0, bag_size: 62258\n",
      "batch 519, loss: 0.0202, instance_loss: 0.0029, weighted_loss: 0.0150, label: 0, bag_size: 23076\n",
      "batch 539, loss: 0.0019, instance_loss: 0.0006, weighted_loss: 0.0015, label: 2, bag_size: 51017\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 14, train_loss: 0.1576, train_clustering_loss:  0.0054, train_error: 0.0474\n",
      "class 0: acc 0.9202127659574468, correct 173/188\n",
      "class 1: acc 0.94375, correct 151/160\n",
      "class 2: acc 0.99, correct 198/200\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3957, val_error: 0.1475, auc: 0.9704\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7368421052631579, correct 14/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 8 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1355, instance_loss: 0.0006, weighted_loss: 0.0950, label: 2, bag_size: 59224\n",
      "batch 39, loss: 0.0343, instance_loss: 0.0031, weighted_loss: 0.0249, label: 2, bag_size: 54265\n",
      "batch 59, loss: 0.0401, instance_loss: 0.0034, weighted_loss: 0.0291, label: 2, bag_size: 11415\n",
      "batch 79, loss: 0.0004, instance_loss: 0.0003, weighted_loss: 0.0004, label: 1, bag_size: 53245\n",
      "batch 99, loss: 0.0242, instance_loss: 0.0010, weighted_loss: 0.0172, label: 0, bag_size: 48589\n",
      "batch 119, loss: 0.0057, instance_loss: 0.0000, weighted_loss: 0.0040, label: 2, bag_size: 38471\n",
      "batch 139, loss: 0.0829, instance_loss: 0.0040, weighted_loss: 0.0592, label: 2, bag_size: 5173\n",
      "batch 159, loss: 0.1781, instance_loss: 0.0212, weighted_loss: 0.1310, label: 2, bag_size: 16282\n",
      "batch 179, loss: 0.0418, instance_loss: 0.0023, weighted_loss: 0.0299, label: 2, bag_size: 58111\n",
      "batch 199, loss: 0.2009, instance_loss: 0.0018, weighted_loss: 0.1412, label: 2, bag_size: 68302\n",
      "batch 219, loss: 0.0010, instance_loss: 0.0060, weighted_loss: 0.0025, label: 2, bag_size: 84436\n",
      "batch 239, loss: 0.0003, instance_loss: 0.0008, weighted_loss: 0.0004, label: 0, bag_size: 62694\n",
      "batch 259, loss: 0.0020, instance_loss: 0.0056, weighted_loss: 0.0031, label: 1, bag_size: 11316\n",
      "batch 279, loss: 0.4285, instance_loss: 0.0017, weighted_loss: 0.3005, label: 1, bag_size: 72255\n",
      "batch 299, loss: 0.0643, instance_loss: 0.0042, weighted_loss: 0.0463, label: 1, bag_size: 23736\n",
      "batch 319, loss: 0.0003, instance_loss: 0.0033, weighted_loss: 0.0012, label: 0, bag_size: 23506\n",
      "batch 339, loss: 0.0090, instance_loss: 0.0036, weighted_loss: 0.0074, label: 1, bag_size: 10151\n",
      "batch 359, loss: 0.0199, instance_loss: 0.0016, weighted_loss: 0.0144, label: 1, bag_size: 50027\n",
      "batch 379, loss: 0.2056, instance_loss: 0.0010, weighted_loss: 0.1442, label: 2, bag_size: 64675\n",
      "batch 399, loss: 2.8165, instance_loss: 0.0003, weighted_loss: 1.9716, label: 1, bag_size: 48985\n",
      "batch 419, loss: 0.2727, instance_loss: 0.0174, weighted_loss: 0.1961, label: 0, bag_size: 33130\n",
      "batch 439, loss: 0.0059, instance_loss: 0.0025, weighted_loss: 0.0049, label: 2, bag_size: 47603\n",
      "batch 459, loss: 0.0254, instance_loss: 0.0105, weighted_loss: 0.0209, label: 1, bag_size: 172802\n",
      "batch 479, loss: 0.0202, instance_loss: 0.0016, weighted_loss: 0.0146, label: 1, bag_size: 24044\n",
      "batch 499, loss: 0.4184, instance_loss: 0.0116, weighted_loss: 0.2964, label: 1, bag_size: 53467\n",
      "batch 519, loss: 0.0432, instance_loss: 0.0019, weighted_loss: 0.0308, label: 1, bag_size: 102214\n",
      "batch 539, loss: 0.0017, instance_loss: 0.0030, weighted_loss: 0.0021, label: 0, bag_size: 103422\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 15, train_loss: 0.1414, train_clustering_loss:  0.0046, train_error: 0.0529\n",
      "class 0: acc 0.9243243243243243, correct 171/185\n",
      "class 1: acc 0.9261363636363636, correct 163/176\n",
      "class 2: acc 0.9893048128342246, correct 185/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4485, val_error: 0.1475, auc: 0.9621\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7368421052631579, correct 14/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 9 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.7778, instance_loss: 0.0008, weighted_loss: 1.2447, label: 0, bag_size: 78533\n",
      "batch 39, loss: 0.1040, instance_loss: 0.0100, weighted_loss: 0.0758, label: 0, bag_size: 68286\n",
      "batch 59, loss: 0.1792, instance_loss: 0.0104, weighted_loss: 0.1286, label: 1, bag_size: 53467\n",
      "batch 79, loss: 0.0033, instance_loss: 0.0067, weighted_loss: 0.0043, label: 2, bag_size: 82484\n",
      "batch 99, loss: 0.0010, instance_loss: 0.0038, weighted_loss: 0.0018, label: 2, bag_size: 48881\n",
      "batch 119, loss: 0.0013, instance_loss: 0.0064, weighted_loss: 0.0029, label: 2, bag_size: 82484\n",
      "batch 139, loss: 0.0057, instance_loss: 0.0059, weighted_loss: 0.0058, label: 0, bag_size: 103422\n",
      "batch 159, loss: 0.0046, instance_loss: 0.0053, weighted_loss: 0.0048, label: 0, bag_size: 93767\n",
      "batch 179, loss: 0.5402, instance_loss: 0.0030, weighted_loss: 0.3790, label: 2, bag_size: 87135\n",
      "batch 199, loss: 0.0446, instance_loss: 0.0188, weighted_loss: 0.0369, label: 1, bag_size: 22123\n",
      "batch 219, loss: 0.0527, instance_loss: 0.0009, weighted_loss: 0.0372, label: 2, bag_size: 72966\n",
      "batch 239, loss: 0.0004, instance_loss: 0.0153, weighted_loss: 0.0048, label: 2, bag_size: 84436\n",
      "batch 259, loss: 0.0014, instance_loss: 0.0085, weighted_loss: 0.0035, label: 0, bag_size: 11462\n",
      "batch 279, loss: 0.0145, instance_loss: 0.0052, weighted_loss: 0.0117, label: 2, bag_size: 63921\n",
      "batch 299, loss: 0.0040, instance_loss: 0.0041, weighted_loss: 0.0040, label: 2, bag_size: 76037\n",
      "batch 319, loss: 0.0000, instance_loss: 0.0019, weighted_loss: 0.0006, label: 0, bag_size: 45490\n",
      "batch 339, loss: 0.0192, instance_loss: 0.0053, weighted_loss: 0.0151, label: 2, bag_size: 63921\n",
      "batch 359, loss: 0.0019, instance_loss: 0.0029, weighted_loss: 0.0022, label: 2, bag_size: 35505\n",
      "batch 379, loss: 0.0001, instance_loss: 0.0041, weighted_loss: 0.0013, label: 0, bag_size: 9374\n",
      "batch 399, loss: 0.0145, instance_loss: 0.0039, weighted_loss: 0.0113, label: 0, bag_size: 36475\n",
      "batch 419, loss: 0.0001, instance_loss: 0.0017, weighted_loss: 0.0006, label: 0, bag_size: 22064\n",
      "batch 439, loss: 0.5310, instance_loss: 0.0006, weighted_loss: 0.3719, label: 1, bag_size: 105100\n",
      "batch 459, loss: 0.0009, instance_loss: 0.0020, weighted_loss: 0.0012, label: 2, bag_size: 72015\n",
      "batch 479, loss: 0.0006, instance_loss: 0.0027, weighted_loss: 0.0012, label: 2, bag_size: 66345\n",
      "batch 499, loss: 0.0444, instance_loss: 0.0010, weighted_loss: 0.0314, label: 1, bag_size: 49445\n",
      "batch 519, loss: 0.0242, instance_loss: 0.0009, weighted_loss: 0.0172, label: 0, bag_size: 65592\n",
      "batch 539, loss: 0.0017, instance_loss: 0.0116, weighted_loss: 0.0047, label: 2, bag_size: 36978\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 16, train_loss: 0.1462, train_clustering_loss:  0.0060, train_error: 0.0438\n",
      "class 0: acc 0.9346733668341709, correct 186/199\n",
      "class 1: acc 0.96875, correct 155/160\n",
      "class 2: acc 0.9682539682539683, correct 183/189\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3303, val_error: 0.1475, auc: 0.9753\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7368421052631579, correct 14/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "Validation loss decreased (0.974158 --> 0.975262).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0015, instance_loss: 0.0084, weighted_loss: 0.0036, label: 2, bag_size: 46661\n",
      "batch 39, loss: 3.6984, instance_loss: 0.0023, weighted_loss: 2.5896, label: 0, bag_size: 72970\n",
      "batch 59, loss: 0.0132, instance_loss: 0.0005, weighted_loss: 0.0094, label: 0, bag_size: 70204\n",
      "batch 79, loss: 0.0262, instance_loss: 0.0032, weighted_loss: 0.0193, label: 2, bag_size: 56310\n",
      "batch 99, loss: 0.0031, instance_loss: 0.0054, weighted_loss: 0.0038, label: 1, bag_size: 66357\n",
      "batch 119, loss: 0.1534, instance_loss: 0.0032, weighted_loss: 0.1083, label: 1, bag_size: 31015\n",
      "batch 139, loss: 0.0125, instance_loss: 0.0013, weighted_loss: 0.0091, label: 1, bag_size: 79045\n",
      "batch 159, loss: 0.0034, instance_loss: 0.0039, weighted_loss: 0.0036, label: 1, bag_size: 113186\n",
      "batch 179, loss: 0.0559, instance_loss: 0.0077, weighted_loss: 0.0415, label: 1, bag_size: 115202\n",
      "batch 199, loss: 0.2490, instance_loss: 0.0060, weighted_loss: 0.1761, label: 2, bag_size: 63921\n",
      "batch 219, loss: 0.0002, instance_loss: 0.0041, weighted_loss: 0.0013, label: 0, bag_size: 54512\n",
      "batch 239, loss: 0.0004, instance_loss: 0.0027, weighted_loss: 0.0011, label: 1, bag_size: 42941\n",
      "batch 259, loss: 0.0010, instance_loss: 0.0002, weighted_loss: 0.0007, label: 1, bag_size: 55668\n",
      "batch 279, loss: 0.7379, instance_loss: 0.0015, weighted_loss: 0.5170, label: 0, bag_size: 78533\n",
      "batch 299, loss: 0.1598, instance_loss: 0.0008, weighted_loss: 0.1121, label: 0, bag_size: 106095\n",
      "batch 319, loss: 0.0076, instance_loss: 0.0093, weighted_loss: 0.0081, label: 1, bag_size: 8990\n",
      "batch 339, loss: 0.0011, instance_loss: 0.0019, weighted_loss: 0.0013, label: 2, bag_size: 69002\n",
      "batch 359, loss: 2.7902, instance_loss: 0.0029, weighted_loss: 1.9540, label: 0, bag_size: 72970\n",
      "batch 379, loss: 0.0024, instance_loss: 0.0016, weighted_loss: 0.0022, label: 0, bag_size: 66432\n",
      "batch 399, loss: 1.0437, instance_loss: 0.0009, weighted_loss: 0.7308, label: 1, bag_size: 92502\n",
      "batch 419, loss: 0.0160, instance_loss: 0.0002, weighted_loss: 0.0112, label: 1, bag_size: 56317\n",
      "batch 439, loss: 0.0169, instance_loss: 0.0039, weighted_loss: 0.0130, label: 2, bag_size: 46661\n",
      "batch 459, loss: 0.0003, instance_loss: 0.0011, weighted_loss: 0.0006, label: 0, bag_size: 59101\n",
      "batch 479, loss: 0.0001, instance_loss: 0.0015, weighted_loss: 0.0006, label: 2, bag_size: 47603\n",
      "batch 499, loss: 0.0001, instance_loss: 0.0056, weighted_loss: 0.0017, label: 2, bag_size: 50045\n",
      "batch 519, loss: 0.0744, instance_loss: 0.0007, weighted_loss: 0.0523, label: 2, bag_size: 66023\n",
      "batch 539, loss: 0.2007, instance_loss: 0.0041, weighted_loss: 0.1417, label: 0, bag_size: 49839\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 17, train_loss: 0.1724, train_clustering_loss:  0.0062, train_error: 0.0620\n",
      "class 0: acc 0.9144385026737968, correct 171/187\n",
      "class 1: acc 0.9209039548022598, correct 163/177\n",
      "class 2: acc 0.9782608695652174, correct 180/184\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3313, val_error: 0.1311, auc: 0.9741\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7894736842105263, correct 15/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0122, instance_loss: 0.0020, weighted_loss: 0.0092, label: 0, bag_size: 57597\n",
      "batch 39, loss: 0.0946, instance_loss: 0.0012, weighted_loss: 0.0666, label: 2, bag_size: 52924\n",
      "batch 59, loss: 0.0029, instance_loss: 0.0083, weighted_loss: 0.0045, label: 1, bag_size: 44591\n",
      "batch 79, loss: 0.3052, instance_loss: 0.0210, weighted_loss: 0.2200, label: 0, bag_size: 7318\n",
      "batch 99, loss: 0.0299, instance_loss: 0.0025, weighted_loss: 0.0217, label: 2, bag_size: 38471\n",
      "batch 119, loss: 0.0106, instance_loss: 0.0000, weighted_loss: 0.0074, label: 2, bag_size: 103823\n",
      "batch 139, loss: 0.0030, instance_loss: 0.0038, weighted_loss: 0.0032, label: 2, bag_size: 66023\n",
      "batch 159, loss: 0.0022, instance_loss: 0.0038, weighted_loss: 0.0027, label: 0, bag_size: 86382\n",
      "batch 179, loss: 0.0036, instance_loss: 0.0024, weighted_loss: 0.0032, label: 0, bag_size: 80173\n",
      "batch 199, loss: 0.0795, instance_loss: 0.0103, weighted_loss: 0.0587, label: 0, bag_size: 7780\n",
      "batch 219, loss: 0.1241, instance_loss: 0.0057, weighted_loss: 0.0886, label: 1, bag_size: 23736\n",
      "batch 239, loss: 0.0046, instance_loss: 0.0007, weighted_loss: 0.0034, label: 0, bag_size: 64143\n",
      "batch 259, loss: 0.1080, instance_loss: 0.0013, weighted_loss: 0.0760, label: 0, bag_size: 53043\n",
      "batch 279, loss: 0.0209, instance_loss: 0.0020, weighted_loss: 0.0152, label: 2, bag_size: 38471\n",
      "batch 299, loss: 0.0145, instance_loss: 0.0015, weighted_loss: 0.0106, label: 2, bag_size: 54040\n",
      "batch 319, loss: 0.1843, instance_loss: 0.0552, weighted_loss: 0.1456, label: 1, bag_size: 5928\n",
      "batch 339, loss: 0.7884, instance_loss: 0.0034, weighted_loss: 0.5529, label: 2, bag_size: 78955\n",
      "batch 359, loss: 0.0017, instance_loss: 0.0097, weighted_loss: 0.0041, label: 1, bag_size: 54398\n",
      "batch 379, loss: 0.0208, instance_loss: 0.0009, weighted_loss: 0.0148, label: 0, bag_size: 85831\n",
      "batch 399, loss: 0.0004, instance_loss: 0.0032, weighted_loss: 0.0012, label: 2, bag_size: 76037\n",
      "batch 419, loss: 0.0050, instance_loss: 0.0024, weighted_loss: 0.0042, label: 2, bag_size: 71763\n",
      "batch 439, loss: 0.0028, instance_loss: 0.0008, weighted_loss: 0.0022, label: 1, bag_size: 61109\n",
      "batch 459, loss: 0.0005, instance_loss: 0.0024, weighted_loss: 0.0011, label: 0, bag_size: 54937\n",
      "batch 479, loss: 0.0011, instance_loss: 0.0049, weighted_loss: 0.0022, label: 2, bag_size: 73189\n",
      "batch 499, loss: 0.1937, instance_loss: 0.0032, weighted_loss: 0.1366, label: 1, bag_size: 12019\n",
      "batch 519, loss: 0.0009, instance_loss: 0.0012, weighted_loss: 0.0010, label: 1, bag_size: 67008\n",
      "batch 539, loss: 0.0039, instance_loss: 0.0021, weighted_loss: 0.0034, label: 0, bag_size: 104395\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 18, train_loss: 0.1009, train_clustering_loss:  0.0051, train_error: 0.0401\n",
      "class 0: acc 0.9347826086956522, correct 172/184\n",
      "class 1: acc 0.9491525423728814, correct 168/177\n",
      "class 2: acc 0.9946524064171123, correct 186/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6180, val_error: 0.1639, auc: 0.9684\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9459459459459459, correct 35/37\n",
      "class 1: acc 0.631578947368421, correct 12/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0013, instance_loss: 0.0042, weighted_loss: 0.0022, label: 1, bag_size: 113716\n",
      "batch 39, loss: 0.0460, instance_loss: 0.0033, weighted_loss: 0.0332, label: 1, bag_size: 73500\n",
      "batch 59, loss: 0.0038, instance_loss: 0.0034, weighted_loss: 0.0037, label: 1, bag_size: 25106\n",
      "batch 79, loss: 0.0134, instance_loss: 0.0488, weighted_loss: 0.0240, label: 1, bag_size: 5928\n",
      "batch 99, loss: 0.0088, instance_loss: 0.0032, weighted_loss: 0.0071, label: 0, bag_size: 78549\n",
      "batch 119, loss: 0.0008, instance_loss: 0.0002, weighted_loss: 0.0006, label: 0, bag_size: 64143\n",
      "batch 139, loss: 0.2581, instance_loss: 0.0027, weighted_loss: 0.1815, label: 0, bag_size: 93784\n",
      "batch 159, loss: 0.0000, instance_loss: 0.0022, weighted_loss: 0.0007, label: 0, bag_size: 45490\n",
      "batch 179, loss: 0.1297, instance_loss: 0.0033, weighted_loss: 0.0918, label: 1, bag_size: 91540\n",
      "batch 199, loss: 0.0604, instance_loss: 0.0002, weighted_loss: 0.0424, label: 2, bag_size: 51554\n",
      "batch 219, loss: 0.0896, instance_loss: 0.0021, weighted_loss: 0.0633, label: 0, bag_size: 52653\n",
      "batch 239, loss: 0.0007, instance_loss: 0.0035, weighted_loss: 0.0015, label: 1, bag_size: 54398\n",
      "batch 259, loss: 0.0283, instance_loss: 0.0045, weighted_loss: 0.0212, label: 1, bag_size: 114407\n",
      "batch 279, loss: 0.0006, instance_loss: 0.0024, weighted_loss: 0.0012, label: 0, bag_size: 114059\n",
      "batch 299, loss: 0.0012, instance_loss: 0.0036, weighted_loss: 0.0019, label: 1, bag_size: 74826\n",
      "batch 319, loss: 0.0600, instance_loss: 0.0034, weighted_loss: 0.0430, label: 0, bag_size: 59429\n",
      "batch 339, loss: 0.1120, instance_loss: 0.0019, weighted_loss: 0.0790, label: 0, bag_size: 74863\n",
      "batch 359, loss: 0.0132, instance_loss: 0.0012, weighted_loss: 0.0096, label: 2, bag_size: 38471\n",
      "batch 379, loss: 0.0043, instance_loss: 0.0049, weighted_loss: 0.0045, label: 0, bag_size: 21648\n",
      "batch 399, loss: 0.3213, instance_loss: 0.0045, weighted_loss: 0.2263, label: 2, bag_size: 28252\n",
      "batch 419, loss: 0.0087, instance_loss: 0.0022, weighted_loss: 0.0068, label: 0, bag_size: 36475\n",
      "batch 439, loss: 0.0121, instance_loss: 0.0028, weighted_loss: 0.0093, label: 0, bag_size: 40166\n",
      "batch 459, loss: 0.2449, instance_loss: 0.0131, weighted_loss: 0.1754, label: 1, bag_size: 9486\n",
      "batch 479, loss: 0.0158, instance_loss: 0.0005, weighted_loss: 0.0112, label: 1, bag_size: 54288\n",
      "batch 499, loss: 0.0122, instance_loss: 0.0102, weighted_loss: 0.0116, label: 1, bag_size: 62739\n",
      "batch 519, loss: 0.0391, instance_loss: 0.1118, weighted_loss: 0.0609, label: 2, bag_size: 1329\n",
      "batch 539, loss: 0.3558, instance_loss: 0.0036, weighted_loss: 0.2501, label: 1, bag_size: 74256\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 19, train_loss: 0.1497, train_clustering_loss:  0.0058, train_error: 0.0511\n",
      "class 0: acc 0.9306930693069307, correct 188/202\n",
      "class 1: acc 0.9325842696629213, correct 166/178\n",
      "class 2: acc 0.9880952380952381, correct 166/168\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3579, val_error: 0.1475, auc: 0.9762\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.918918918918919, correct 34/37\n",
      "class 1: acc 0.7368421052631579, correct 14/19\n",
      "class 2: acc 0.8, correct 4/5\n",
      "Validation loss decreased (0.975262 --> 0.976162).  Saving model ...\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.974099099099099\n",
      "0.968671679197995\n",
      "0.9857142857142858\n",
      "Val error: 0.1475, ROC AUC: 0.9762\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9853658536585366\n",
      "0.9883203559510567\n",
      "0.9978902953586498\n",
      "Test error: 0.0879, ROC AUC: 0.9905\n",
      "class 0: acc 0.88, correct 44/50\n",
      "class 1: acc 0.9310344827586207, correct 27/29\n",
      "class 2: acc 1.0, correct 12/12\n",
      "   Unnamed: 0       case_id  \\\n",
      "0           0  TCGA-4A-A93X   \n",
      "1           1  TCGA-B3-4104   \n",
      "2           2  TCGA-BP-4963   \n",
      "3           3  TCGA-BP-5170   \n",
      "4           4  TCGA-BP-5175   \n",
      "\n",
      "                                            slide_id oncotree_code site   age  \\\n",
      "0  TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A  58.0   \n",
      "1  TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3  75.0   \n",
      "2  TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP  63.0   \n",
      "3  TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP  55.0   \n",
      "4  TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP  60.0   \n",
      "\n",
      "   survival_months  is_female  censorship race label  \n",
      "0            12.81        0.0         1.0    W     1  \n",
      "1            34.46        0.0         1.0    W     1  \n",
      "2            60.25        0.0         1.0    W     0  \n",
      "3            79.24        0.0         1.0    W     0  \n",
      "4            30.62        0.0         1.0    W     0  \n",
      "Traing Data Size ({1.00}): 548 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 61 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 91 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "\n",
      "Training Fold 3!\n",
      "\n",
      "Init train/val/test splits... \n",
      "Done!\n",
      "Training on 548 samples\n",
      "Validating on 61 samples\n",
      "Testing on 91 samples\n",
      "\n",
      "Init loss function... Done!\n",
      "\n",
      "Init Model... Setting tau to 1.0\n",
      "Done!\n",
      "CLAM_SB(\n",
      "  (attention_net): Sequential(\n",
      "    (0): Linear(in_features=384, out_features=384, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Dropout(p=0.25, inplace=False)\n",
      "    (3): Attn_Net_Gated(\n",
      "      (attention_a): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Tanh()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_b): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Sigmoid()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_c): Linear(in_features=256, out_features=1, bias=True)\n",
      "    )\n",
      "  )\n",
      "  (feature_linear1): Linear(in_features=768, out_features=384, bias=True)\n",
      "  (classifiers): Linear(in_features=384, out_features=3, bias=True)\n",
      "  (instance_classifiers): ModuleList(\n",
      "    (0): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (1): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (2): Linear(in_features=384, out_features=2, bias=True)\n",
      "  )\n",
      "  (instance_loss_fn): SmoothTop1SVM()\n",
      ")\n",
      "Total number of parameters: 643978\n",
      "Total number of trainable parameters: 643978\n",
      "\n",
      "Init optimizer ... Done!\n",
      "\n",
      "Init Loaders... Done!\n",
      "\n",
      "Setup EarlyStopping... Done!\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.5599, instance_loss: 1.2491, weighted_loss: 0.7667, label: 2, bag_size: 59810\n",
      "batch 39, loss: 0.7633, instance_loss: 1.2120, weighted_loss: 0.8979, label: 0, bag_size: 56229\n",
      "batch 59, loss: 1.0534, instance_loss: 0.7189, weighted_loss: 0.9530, label: 1, bag_size: 56911\n",
      "batch 79, loss: 0.4827, instance_loss: 0.7875, weighted_loss: 0.5742, label: 2, bag_size: 48881\n",
      "batch 99, loss: 0.5591, instance_loss: 0.5292, weighted_loss: 0.5501, label: 2, bag_size: 87135\n",
      "batch 119, loss: 0.3601, instance_loss: 1.1067, weighted_loss: 0.5841, label: 1, bag_size: 28349\n",
      "batch 139, loss: 0.9627, instance_loss: 0.4037, weighted_loss: 0.7950, label: 0, bag_size: 34548\n",
      "batch 159, loss: 1.3080, instance_loss: 0.7891, weighted_loss: 1.1523, label: 1, bag_size: 5928\n",
      "batch 179, loss: 0.5442, instance_loss: 0.3254, weighted_loss: 0.4786, label: 2, bag_size: 69080\n",
      "batch 199, loss: 0.9830, instance_loss: 0.1516, weighted_loss: 0.7335, label: 0, bag_size: 52270\n",
      "batch 219, loss: 0.1525, instance_loss: 0.7089, weighted_loss: 0.3194, label: 1, bag_size: 72064\n",
      "batch 239, loss: 0.6719, instance_loss: 1.6129, weighted_loss: 0.9542, label: 2, bag_size: 11119\n",
      "batch 259, loss: 0.1744, instance_loss: 0.0191, weighted_loss: 0.1278, label: 0, bag_size: 106095\n",
      "batch 279, loss: 0.2392, instance_loss: 0.3721, weighted_loss: 0.2791, label: 2, bag_size: 50045\n",
      "batch 299, loss: 2.5260, instance_loss: 0.1580, weighted_loss: 1.8156, label: 2, bag_size: 52584\n",
      "batch 319, loss: 0.4075, instance_loss: 0.1420, weighted_loss: 0.3279, label: 2, bag_size: 54040\n",
      "batch 339, loss: 0.3899, instance_loss: 0.0169, weighted_loss: 0.2780, label: 2, bag_size: 72686\n",
      "batch 359, loss: 0.0493, instance_loss: 0.0397, weighted_loss: 0.0464, label: 2, bag_size: 30615\n",
      "batch 379, loss: 0.2009, instance_loss: 0.1033, weighted_loss: 0.1716, label: 1, bag_size: 92385\n",
      "batch 399, loss: 0.1553, instance_loss: 0.0552, weighted_loss: 0.1253, label: 1, bag_size: 15902\n",
      "batch 419, loss: 0.9117, instance_loss: 0.0117, weighted_loss: 0.6417, label: 1, bag_size: 80573\n",
      "batch 439, loss: 1.4870, instance_loss: 0.0596, weighted_loss: 1.0588, label: 1, bag_size: 20115\n",
      "batch 459, loss: 0.5673, instance_loss: 0.0480, weighted_loss: 0.4115, label: 2, bag_size: 79373\n",
      "batch 479, loss: 0.0586, instance_loss: 0.0185, weighted_loss: 0.0465, label: 2, bag_size: 59224\n",
      "batch 499, loss: 0.0304, instance_loss: 0.0162, weighted_loss: 0.0261, label: 2, bag_size: 59810\n",
      "batch 519, loss: 0.0710, instance_loss: 0.1418, weighted_loss: 0.0923, label: 0, bag_size: 14487\n",
      "batch 539, loss: 0.4809, instance_loss: 0.0109, weighted_loss: 0.3399, label: 2, bag_size: 65340\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9272354014598541: correct 4065/4384\n",
      "class 1 clustering acc 0.8793339416058394: correct 3855/4384\n",
      "Epoch: 0, train_loss: 0.7202, train_clustering_loss:  0.3866, train_error: 0.3047\n",
      "class 0: acc 0.6136363636363636, correct 108/176\n",
      "class 1: acc 0.6511627906976745, correct 112/172\n",
      "class 2: acc 0.805, correct 161/200\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6216, val_error: 0.2787, auc: 0.9355\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8928571428571429, correct 25/28\n",
      "class 1: acc 0.42857142857142855, correct 9/21\n",
      "class 2: acc 0.8333333333333334, correct 10/12\n",
      "Validation loss decreased (inf --> 0.935549).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.6991, instance_loss: 0.0028, weighted_loss: 0.4902, label: 2, bag_size: 47138\n",
      "batch 39, loss: 0.3288, instance_loss: 0.0208, weighted_loss: 0.2364, label: 0, bag_size: 79156\n",
      "batch 59, loss: 0.1728, instance_loss: 0.0183, weighted_loss: 0.1264, label: 0, bag_size: 53181\n",
      "batch 79, loss: 0.1426, instance_loss: 0.0037, weighted_loss: 0.1009, label: 1, bag_size: 39733\n",
      "batch 99, loss: 0.0041, instance_loss: 0.0725, weighted_loss: 0.0246, label: 1, bag_size: 2939\n",
      "batch 119, loss: 0.8116, instance_loss: 0.0059, weighted_loss: 0.5699, label: 2, bag_size: 76843\n",
      "batch 139, loss: 0.0410, instance_loss: 0.0054, weighted_loss: 0.0303, label: 1, bag_size: 76028\n",
      "batch 159, loss: 0.1020, instance_loss: 0.0257, weighted_loss: 0.0791, label: 2, bag_size: 82484\n",
      "batch 179, loss: 1.5363, instance_loss: 0.0018, weighted_loss: 1.0759, label: 2, bag_size: 91267\n",
      "batch 199, loss: 0.0574, instance_loss: 0.0023, weighted_loss: 0.0409, label: 1, bag_size: 59576\n",
      "batch 219, loss: 0.2849, instance_loss: 0.0109, weighted_loss: 0.2027, label: 0, bag_size: 6579\n",
      "batch 239, loss: 0.1646, instance_loss: 0.0146, weighted_loss: 0.1196, label: 2, bag_size: 87135\n",
      "batch 259, loss: 0.0652, instance_loss: 0.0252, weighted_loss: 0.0532, label: 2, bag_size: 75833\n",
      "batch 279, loss: 0.1091, instance_loss: 0.0075, weighted_loss: 0.0786, label: 2, bag_size: 66345\n",
      "batch 299, loss: 0.1025, instance_loss: 0.0067, weighted_loss: 0.0738, label: 0, bag_size: 21648\n",
      "batch 319, loss: 0.1716, instance_loss: 0.0020, weighted_loss: 0.1207, label: 1, bag_size: 91901\n",
      "batch 339, loss: 0.4696, instance_loss: 0.0168, weighted_loss: 0.3338, label: 1, bag_size: 5928\n",
      "batch 359, loss: 0.0019, instance_loss: 0.0577, weighted_loss: 0.0186, label: 1, bag_size: 11509\n",
      "batch 379, loss: 0.0216, instance_loss: 0.0388, weighted_loss: 0.0267, label: 2, bag_size: 46661\n",
      "batch 399, loss: 0.0886, instance_loss: 0.0046, weighted_loss: 0.0634, label: 1, bag_size: 20374\n",
      "batch 419, loss: 0.2071, instance_loss: 0.0021, weighted_loss: 0.1456, label: 0, bag_size: 36141\n",
      "batch 439, loss: 0.0306, instance_loss: 0.0128, weighted_loss: 0.0253, label: 2, bag_size: 48881\n",
      "batch 459, loss: 0.2570, instance_loss: 0.0033, weighted_loss: 0.1809, label: 1, bag_size: 60502\n",
      "batch 479, loss: 0.1105, instance_loss: 0.0020, weighted_loss: 0.0780, label: 2, bag_size: 34408\n",
      "batch 499, loss: 1.1776, instance_loss: 0.0014, weighted_loss: 0.8247, label: 0, bag_size: 74863\n",
      "batch 519, loss: 0.1770, instance_loss: 0.0045, weighted_loss: 0.1252, label: 2, bag_size: 58111\n",
      "batch 539, loss: 0.0143, instance_loss: 0.1196, weighted_loss: 0.0459, label: 1, bag_size: 44760\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9970346715328468: correct 4371/4384\n",
      "class 1 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "Epoch: 1, train_loss: 0.4105, train_clustering_loss:  0.0265, train_error: 0.1679\n",
      "class 0: acc 0.7954545454545454, correct 140/176\n",
      "class 1: acc 0.7738095238095238, correct 130/168\n",
      "class 2: acc 0.9117647058823529, correct 186/204\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3640, val_error: 0.1803, auc: 0.9668\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 0.7619047619047619, correct 16/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "Validation loss decreased (0.935549 --> 0.966816).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.1996, instance_loss: 0.0112, weighted_loss: 0.1431, label: 2, bag_size: 11415\n",
      "batch 39, loss: 0.1378, instance_loss: 0.0015, weighted_loss: 0.0969, label: 2, bag_size: 69002\n",
      "batch 59, loss: 0.0670, instance_loss: 0.0044, weighted_loss: 0.0483, label: 0, bag_size: 79176\n",
      "batch 79, loss: 0.5409, instance_loss: 0.0111, weighted_loss: 0.3820, label: 1, bag_size: 61193\n",
      "batch 99, loss: 0.1423, instance_loss: 0.0060, weighted_loss: 0.1014, label: 2, bag_size: 59810\n",
      "batch 119, loss: 0.0356, instance_loss: 0.0108, weighted_loss: 0.0281, label: 2, bag_size: 48593\n",
      "batch 139, loss: 0.0622, instance_loss: 0.0068, weighted_loss: 0.0455, label: 1, bag_size: 81168\n",
      "batch 159, loss: 0.1580, instance_loss: 0.0024, weighted_loss: 0.1113, label: 0, bag_size: 59830\n",
      "batch 179, loss: 0.0092, instance_loss: 0.0110, weighted_loss: 0.0098, label: 2, bag_size: 65340\n",
      "batch 199, loss: 0.1025, instance_loss: 0.0024, weighted_loss: 0.0725, label: 2, bag_size: 54040\n",
      "batch 219, loss: 0.0453, instance_loss: 0.1626, weighted_loss: 0.0805, label: 2, bag_size: 30615\n",
      "batch 239, loss: 0.0081, instance_loss: 0.0044, weighted_loss: 0.0070, label: 0, bag_size: 87979\n",
      "batch 259, loss: 0.0261, instance_loss: 0.0042, weighted_loss: 0.0195, label: 0, bag_size: 39692\n",
      "batch 279, loss: 0.0076, instance_loss: 0.0065, weighted_loss: 0.0073, label: 0, bag_size: 48547\n",
      "batch 299, loss: 0.0578, instance_loss: 0.0027, weighted_loss: 0.0413, label: 2, bag_size: 50246\n",
      "batch 319, loss: 1.3289, instance_loss: 0.0011, weighted_loss: 0.9305, label: 1, bag_size: 42983\n",
      "batch 339, loss: 0.1055, instance_loss: 0.0029, weighted_loss: 0.0747, label: 2, bag_size: 79690\n",
      "batch 359, loss: 0.5912, instance_loss: 0.0002, weighted_loss: 0.4139, label: 0, bag_size: 38129\n",
      "batch 379, loss: 0.0242, instance_loss: 0.0091, weighted_loss: 0.0197, label: 1, bag_size: 66357\n",
      "batch 399, loss: 0.2173, instance_loss: 0.0008, weighted_loss: 0.1523, label: 1, bag_size: 54398\n",
      "batch 419, loss: 0.0472, instance_loss: 0.0017, weighted_loss: 0.0336, label: 2, bag_size: 40315\n",
      "batch 439, loss: 0.9771, instance_loss: 0.0467, weighted_loss: 0.6979, label: 0, bag_size: 28029\n",
      "batch 459, loss: 0.0353, instance_loss: 0.0045, weighted_loss: 0.0260, label: 2, bag_size: 76037\n",
      "batch 479, loss: 0.0178, instance_loss: 0.0016, weighted_loss: 0.0129, label: 0, bag_size: 53908\n",
      "batch 499, loss: 0.0148, instance_loss: 0.0000, weighted_loss: 0.0104, label: 1, bag_size: 55270\n",
      "batch 519, loss: 0.4307, instance_loss: 0.0013, weighted_loss: 0.3018, label: 2, bag_size: 91267\n",
      "batch 539, loss: 3.0420, instance_loss: 0.0066, weighted_loss: 2.1314, label: 2, bag_size: 23841\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "class 1 clustering acc 0.9981751824817519: correct 4376/4384\n",
      "Epoch: 2, train_loss: 0.3051, train_clustering_loss:  0.0188, train_error: 0.1095\n",
      "class 0: acc 0.8698224852071006, correct 147/169\n",
      "class 1: acc 0.8645833333333334, correct 166/192\n",
      "class 2: acc 0.9358288770053476, correct 175/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4044, val_error: 0.0984, auc: 0.9736\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 1.0, correct 21/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "Validation loss decreased (0.966816 --> 0.973629).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.1970, instance_loss: 0.0020, weighted_loss: 0.8385, label: 2, bag_size: 65317\n",
      "batch 39, loss: 0.1565, instance_loss: 0.0004, weighted_loss: 0.1097, label: 1, bag_size: 62303\n",
      "batch 59, loss: 0.0433, instance_loss: 0.0008, weighted_loss: 0.0305, label: 2, bag_size: 54265\n",
      "batch 79, loss: 0.1039, instance_loss: 0.0006, weighted_loss: 0.0729, label: 1, bag_size: 42997\n",
      "batch 99, loss: 0.0481, instance_loss: 0.0019, weighted_loss: 0.0342, label: 0, bag_size: 58267\n",
      "batch 119, loss: 0.0100, instance_loss: 0.0009, weighted_loss: 0.0073, label: 0, bag_size: 92719\n",
      "batch 139, loss: 0.0003, instance_loss: 0.0140, weighted_loss: 0.0044, label: 1, bag_size: 27124\n",
      "batch 159, loss: 0.0019, instance_loss: 0.0118, weighted_loss: 0.0049, label: 0, bag_size: 9018\n",
      "batch 179, loss: 0.5148, instance_loss: 0.0033, weighted_loss: 0.3613, label: 0, bag_size: 30015\n",
      "batch 199, loss: 1.1545, instance_loss: 0.0074, weighted_loss: 0.8104, label: 2, bag_size: 51554\n",
      "batch 219, loss: 1.9058, instance_loss: 0.0144, weighted_loss: 1.3383, label: 1, bag_size: 31015\n",
      "batch 239, loss: 0.1790, instance_loss: 0.0009, weighted_loss: 0.1256, label: 0, bag_size: 70854\n",
      "batch 259, loss: 0.0755, instance_loss: 0.0067, weighted_loss: 0.0549, label: 2, bag_size: 54040\n",
      "batch 279, loss: 0.0053, instance_loss: 0.0037, weighted_loss: 0.0048, label: 0, bag_size: 55538\n",
      "batch 299, loss: 0.0202, instance_loss: 0.0177, weighted_loss: 0.0195, label: 0, bag_size: 12044\n",
      "batch 319, loss: 0.0269, instance_loss: 0.0057, weighted_loss: 0.0205, label: 1, bag_size: 113186\n",
      "batch 339, loss: 2.5248, instance_loss: 0.0945, weighted_loss: 1.7957, label: 0, bag_size: 9339\n",
      "batch 359, loss: 0.3891, instance_loss: 0.0004, weighted_loss: 0.2725, label: 1, bag_size: 49727\n",
      "batch 379, loss: 0.0100, instance_loss: 0.0013, weighted_loss: 0.0074, label: 1, bag_size: 19703\n",
      "batch 399, loss: 0.9193, instance_loss: 0.0026, weighted_loss: 0.6442, label: 1, bag_size: 67478\n",
      "batch 419, loss: 0.4800, instance_loss: 0.0000, weighted_loss: 0.3360, label: 2, bag_size: 52924\n",
      "batch 439, loss: 0.0014, instance_loss: 0.0000, weighted_loss: 0.0010, label: 0, bag_size: 65462\n",
      "batch 459, loss: 0.5550, instance_loss: 0.0014, weighted_loss: 0.3889, label: 1, bag_size: 29399\n",
      "batch 479, loss: 0.4077, instance_loss: 0.0039, weighted_loss: 0.2865, label: 1, bag_size: 21267\n",
      "batch 499, loss: 1.3577, instance_loss: 0.0053, weighted_loss: 0.9520, label: 0, bag_size: 38066\n",
      "batch 519, loss: 0.0210, instance_loss: 0.0009, weighted_loss: 0.0150, label: 0, bag_size: 70204\n",
      "batch 539, loss: 0.2809, instance_loss: 0.0010, weighted_loss: 0.1969, label: 2, bag_size: 15486\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 3, train_loss: 0.2738, train_clustering_loss:  0.0073, train_error: 0.1150\n",
      "class 0: acc 0.8395061728395061, correct 136/162\n",
      "class 1: acc 0.8677248677248677, correct 164/189\n",
      "class 2: acc 0.9390862944162437, correct 185/197\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4064, val_error: 0.1639, auc: 0.9675\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.7857142857142857, correct 22/28\n",
      "class 1: acc 0.8571428571428571, correct 18/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0434, instance_loss: 0.0065, weighted_loss: 0.0323, label: 2, bag_size: 50978\n",
      "batch 39, loss: 0.0335, instance_loss: 0.0104, weighted_loss: 0.0266, label: 0, bag_size: 72844\n",
      "batch 59, loss: 0.0449, instance_loss: 0.0087, weighted_loss: 0.0340, label: 1, bag_size: 35189\n",
      "batch 79, loss: 0.0004, instance_loss: 0.0033, weighted_loss: 0.0013, label: 0, bag_size: 45490\n",
      "batch 99, loss: 0.1799, instance_loss: 0.0384, weighted_loss: 0.1375, label: 1, bag_size: 5928\n",
      "batch 119, loss: 2.3923, instance_loss: 0.0289, weighted_loss: 1.6833, label: 1, bag_size: 22039\n",
      "batch 139, loss: 3.6515, instance_loss: 0.0032, weighted_loss: 2.5570, label: 1, bag_size: 96719\n",
      "batch 159, loss: 0.0048, instance_loss: 0.0005, weighted_loss: 0.0035, label: 0, bag_size: 87072\n",
      "batch 179, loss: 3.4674, instance_loss: 0.0153, weighted_loss: 2.4317, label: 0, bag_size: 59783\n",
      "batch 199, loss: 0.0302, instance_loss: 0.0101, weighted_loss: 0.0242, label: 0, bag_size: 10082\n",
      "batch 219, loss: 0.0739, instance_loss: 0.0077, weighted_loss: 0.0540, label: 0, bag_size: 60709\n",
      "batch 239, loss: 1.9216, instance_loss: 0.0015, weighted_loss: 1.3456, label: 0, bag_size: 59783\n",
      "batch 259, loss: 1.6188, instance_loss: 0.0053, weighted_loss: 1.1348, label: 1, bag_size: 96719\n",
      "batch 279, loss: 0.8825, instance_loss: 0.0007, weighted_loss: 0.6179, label: 1, bag_size: 102214\n",
      "batch 299, loss: 0.3293, instance_loss: 0.0025, weighted_loss: 0.2313, label: 2, bag_size: 58111\n",
      "batch 319, loss: 0.0049, instance_loss: 0.0005, weighted_loss: 0.0036, label: 2, bag_size: 69002\n",
      "batch 339, loss: 0.0052, instance_loss: 0.0024, weighted_loss: 0.0043, label: 1, bag_size: 74826\n",
      "batch 359, loss: 0.4391, instance_loss: 0.0033, weighted_loss: 0.3083, label: 2, bag_size: 51017\n",
      "batch 379, loss: 1.0809, instance_loss: 0.0128, weighted_loss: 0.7605, label: 2, bag_size: 64675\n",
      "batch 399, loss: 0.0197, instance_loss: 0.0043, weighted_loss: 0.0151, label: 0, bag_size: 23076\n",
      "batch 419, loss: 0.0235, instance_loss: 0.0476, weighted_loss: 0.0308, label: 2, bag_size: 12043\n",
      "batch 439, loss: 0.0149, instance_loss: 0.0051, weighted_loss: 0.0120, label: 1, bag_size: 46625\n",
      "batch 459, loss: 0.2876, instance_loss: 0.0527, weighted_loss: 0.2171, label: 2, bag_size: 66801\n",
      "batch 479, loss: 0.0012, instance_loss: 0.0004, weighted_loss: 0.0009, label: 0, bag_size: 79446\n",
      "batch 499, loss: 0.0418, instance_loss: 0.0089, weighted_loss: 0.0319, label: 1, bag_size: 35189\n",
      "batch 519, loss: 0.0033, instance_loss: 0.0021, weighted_loss: 0.0029, label: 1, bag_size: 19703\n",
      "batch 539, loss: 0.0647, instance_loss: 0.0037, weighted_loss: 0.0464, label: 2, bag_size: 50978\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999315693430657: correct 4381/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 4, train_loss: 0.2409, train_clustering_loss:  0.0107, train_error: 0.0985\n",
      "class 0: acc 0.8816568047337278, correct 149/169\n",
      "class 1: acc 0.8756756756756757, correct 162/185\n",
      "class 2: acc 0.9432989690721649, correct 183/194\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4035, val_error: 0.0984, auc: 0.9730\n",
      "class 0 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 1.0, correct 21/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0080, instance_loss: 0.0016, weighted_loss: 0.0061, label: 1, bag_size: 39733\n",
      "batch 39, loss: 0.0503, instance_loss: 0.0034, weighted_loss: 0.0362, label: 2, bag_size: 38471\n",
      "batch 59, loss: 3.6283, instance_loss: 0.0048, weighted_loss: 2.5413, label: 1, bag_size: 112934\n",
      "batch 79, loss: 0.0019, instance_loss: 0.0013, weighted_loss: 0.0017, label: 0, bag_size: 114059\n",
      "batch 99, loss: 0.0253, instance_loss: 0.0179, weighted_loss: 0.0231, label: 0, bag_size: 7780\n",
      "batch 119, loss: 0.0144, instance_loss: 0.0019, weighted_loss: 0.0106, label: 0, bag_size: 114320\n",
      "batch 139, loss: 0.1918, instance_loss: 0.0008, weighted_loss: 0.1345, label: 1, bag_size: 78398\n",
      "batch 159, loss: 0.9118, instance_loss: 0.0020, weighted_loss: 0.6389, label: 0, bag_size: 111148\n",
      "batch 179, loss: 0.1852, instance_loss: 0.0563, weighted_loss: 0.1465, label: 0, bag_size: 91903\n",
      "batch 199, loss: 0.1651, instance_loss: 0.0006, weighted_loss: 0.1157, label: 1, bag_size: 114116\n",
      "batch 219, loss: 0.0006, instance_loss: 0.0008, weighted_loss: 0.0006, label: 0, bag_size: 41071\n",
      "batch 239, loss: 0.0157, instance_loss: 0.0056, weighted_loss: 0.0127, label: 2, bag_size: 69002\n",
      "batch 259, loss: 0.0289, instance_loss: 0.0026, weighted_loss: 0.0210, label: 2, bag_size: 51017\n",
      "batch 279, loss: 0.0323, instance_loss: 0.0499, weighted_loss: 0.0375, label: 1, bag_size: 47660\n",
      "batch 299, loss: 0.0016, instance_loss: 0.0024, weighted_loss: 0.0019, label: 0, bag_size: 26331\n",
      "batch 319, loss: 0.0001, instance_loss: 0.0037, weighted_loss: 0.0012, label: 0, bag_size: 14487\n",
      "batch 339, loss: 0.4045, instance_loss: 0.0103, weighted_loss: 0.2862, label: 2, bag_size: 10243\n",
      "batch 359, loss: 0.0025, instance_loss: 0.0007, weighted_loss: 0.0020, label: 0, bag_size: 59200\n",
      "batch 379, loss: 0.1221, instance_loss: 0.0089, weighted_loss: 0.0882, label: 1, bag_size: 5928\n",
      "batch 399, loss: 0.0171, instance_loss: 0.0013, weighted_loss: 0.0124, label: 2, bag_size: 54040\n",
      "batch 419, loss: 0.0334, instance_loss: 0.0153, weighted_loss: 0.0280, label: 1, bag_size: 27666\n",
      "batch 439, loss: 0.0755, instance_loss: 0.0005, weighted_loss: 0.0530, label: 1, bag_size: 84060\n",
      "batch 459, loss: 0.0316, instance_loss: 0.0002, weighted_loss: 0.0221, label: 1, bag_size: 54288\n",
      "batch 479, loss: 0.0014, instance_loss: 0.0005, weighted_loss: 0.0012, label: 0, bag_size: 114059\n",
      "batch 499, loss: 0.0029, instance_loss: 0.0340, weighted_loss: 0.0122, label: 1, bag_size: 4094\n",
      "batch 519, loss: 0.6323, instance_loss: 0.0024, weighted_loss: 0.4433, label: 1, bag_size: 67478\n",
      "batch 539, loss: 0.0156, instance_loss: 0.0009, weighted_loss: 0.0112, label: 2, bag_size: 54040\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999315693430657: correct 4381/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 5, train_loss: 0.2097, train_clustering_loss:  0.0106, train_error: 0.0821\n",
      "class 0: acc 0.8936170212765957, correct 168/188\n",
      "class 1: acc 0.9044943820224719, correct 161/178\n",
      "class 2: acc 0.9560439560439561, correct 174/182\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3756, val_error: 0.1148, auc: 0.9710\n",
      "class 0 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 3 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 2.6164, instance_loss: 0.0033, weighted_loss: 1.8325, label: 2, bag_size: 78955\n",
      "batch 39, loss: 0.0445, instance_loss: 0.0000, weighted_loss: 0.0312, label: 0, bag_size: 75311\n",
      "batch 59, loss: 0.0581, instance_loss: 0.0189, weighted_loss: 0.0463, label: 1, bag_size: 28349\n",
      "batch 79, loss: 0.0043, instance_loss: 0.0047, weighted_loss: 0.0045, label: 2, bag_size: 72966\n",
      "batch 99, loss: 0.0149, instance_loss: 0.0022, weighted_loss: 0.0111, label: 0, bag_size: 53188\n",
      "batch 119, loss: 0.0029, instance_loss: 0.0032, weighted_loss: 0.0030, label: 2, bag_size: 68187\n",
      "batch 139, loss: 0.0078, instance_loss: 0.0177, weighted_loss: 0.0108, label: 0, bag_size: 71463\n",
      "batch 159, loss: 0.0363, instance_loss: 0.0040, weighted_loss: 0.0266, label: 0, bag_size: 43244\n",
      "batch 179, loss: 0.0063, instance_loss: 0.0523, weighted_loss: 0.0201, label: 0, bag_size: 23506\n",
      "batch 199, loss: 0.0007, instance_loss: 0.0177, weighted_loss: 0.0058, label: 2, bag_size: 51251\n",
      "batch 219, loss: 0.0022, instance_loss: 0.0007, weighted_loss: 0.0018, label: 0, bag_size: 75202\n",
      "batch 239, loss: 0.0557, instance_loss: 0.0008, weighted_loss: 0.0392, label: 0, bag_size: 88181\n",
      "batch 259, loss: 0.4342, instance_loss: 0.0027, weighted_loss: 0.3047, label: 1, bag_size: 31015\n",
      "batch 279, loss: 0.1339, instance_loss: 0.0002, weighted_loss: 0.0938, label: 1, bag_size: 92502\n",
      "batch 299, loss: 0.0107, instance_loss: 0.0048, weighted_loss: 0.0089, label: 1, bag_size: 60316\n",
      "batch 319, loss: 0.0013, instance_loss: 0.0000, weighted_loss: 0.0009, label: 0, bag_size: 43308\n",
      "batch 339, loss: 0.7289, instance_loss: 0.0005, weighted_loss: 0.5104, label: 0, bag_size: 56229\n",
      "batch 359, loss: 0.0001, instance_loss: 0.0019, weighted_loss: 0.0006, label: 1, bag_size: 57077\n",
      "batch 379, loss: 0.7830, instance_loss: 0.0037, weighted_loss: 0.5492, label: 2, bag_size: 76843\n",
      "batch 399, loss: 0.0038, instance_loss: 0.0021, weighted_loss: 0.0033, label: 1, bag_size: 78482\n",
      "batch 419, loss: 0.0154, instance_loss: 0.0023, weighted_loss: 0.0115, label: 0, bag_size: 77113\n",
      "batch 439, loss: 0.0177, instance_loss: 0.0015, weighted_loss: 0.0128, label: 0, bag_size: 19019\n",
      "batch 459, loss: 0.0044, instance_loss: 0.0014, weighted_loss: 0.0035, label: 2, bag_size: 76037\n",
      "batch 479, loss: 0.0068, instance_loss: 0.0004, weighted_loss: 0.0049, label: 2, bag_size: 38471\n",
      "batch 499, loss: 0.0464, instance_loss: 0.0000, weighted_loss: 0.0325, label: 1, bag_size: 105100\n",
      "batch 519, loss: 0.0016, instance_loss: 0.0037, weighted_loss: 0.0022, label: 1, bag_size: 54204\n",
      "batch 539, loss: 1.7548, instance_loss: 0.0058, weighted_loss: 1.2301, label: 0, bag_size: 28795\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 6, train_loss: 0.1662, train_clustering_loss:  0.0082, train_error: 0.0529\n",
      "class 0: acc 0.9347826086956522, correct 172/184\n",
      "class 1: acc 0.9222222222222223, correct 166/180\n",
      "class 2: acc 0.9836956521739131, correct 181/184\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3359, val_error: 0.1148, auc: 0.9701\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.9285714285714286, correct 26/28\n",
      "class 1: acc 0.8571428571428571, correct 18/21\n",
      "class 2: acc 0.8333333333333334, correct 10/12\n",
      "EarlyStopping counter: 4 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.8923, instance_loss: 0.0014, weighted_loss: 1.3250, label: 0, bag_size: 11284\n",
      "batch 39, loss: 0.0872, instance_loss: 0.0013, weighted_loss: 0.0614, label: 2, bag_size: 51554\n",
      "batch 59, loss: 0.0002, instance_loss: 0.0004, weighted_loss: 0.0002, label: 0, bag_size: 85829\n",
      "batch 79, loss: 0.0009, instance_loss: 0.0025, weighted_loss: 0.0014, label: 1, bag_size: 66357\n",
      "batch 99, loss: 0.0439, instance_loss: 0.0029, weighted_loss: 0.0316, label: 0, bag_size: 60909\n",
      "batch 119, loss: 0.0900, instance_loss: 0.0018, weighted_loss: 0.0635, label: 2, bag_size: 50246\n",
      "batch 139, loss: 1.2892, instance_loss: 0.0061, weighted_loss: 0.9043, label: 2, bag_size: 78955\n",
      "batch 159, loss: 6.5591, instance_loss: 0.0376, weighted_loss: 4.6027, label: 0, bag_size: 71427\n",
      "batch 179, loss: 0.0138, instance_loss: 0.0012, weighted_loss: 0.0100, label: 2, bag_size: 47603\n",
      "batch 199, loss: 0.0247, instance_loss: 0.0004, weighted_loss: 0.0174, label: 2, bag_size: 47138\n",
      "batch 219, loss: 0.0003, instance_loss: 0.0010, weighted_loss: 0.0005, label: 0, bag_size: 45846\n",
      "batch 239, loss: 0.9233, instance_loss: 0.0047, weighted_loss: 0.6477, label: 1, bag_size: 38177\n",
      "batch 259, loss: 0.0099, instance_loss: 0.0043, weighted_loss: 0.0082, label: 1, bag_size: 91901\n",
      "batch 279, loss: 0.0039, instance_loss: 0.0019, weighted_loss: 0.0033, label: 2, bag_size: 73189\n",
      "batch 299, loss: 2.0749, instance_loss: 0.0000, weighted_loss: 1.4524, label: 1, bag_size: 112934\n",
      "batch 319, loss: 0.0093, instance_loss: 0.0030, weighted_loss: 0.0074, label: 1, bag_size: 46625\n",
      "batch 339, loss: 0.0347, instance_loss: 0.0003, weighted_loss: 0.0244, label: 0, bag_size: 76011\n",
      "batch 359, loss: 0.8384, instance_loss: 0.0002, weighted_loss: 0.5870, label: 1, bag_size: 102214\n",
      "batch 379, loss: 0.0020, instance_loss: 0.0047, weighted_loss: 0.0028, label: 1, bag_size: 51100\n",
      "batch 399, loss: 0.0001, instance_loss: 0.0125, weighted_loss: 0.0038, label: 2, bag_size: 51251\n",
      "batch 419, loss: 0.1641, instance_loss: 0.0086, weighted_loss: 0.1175, label: 1, bag_size: 10520\n",
      "batch 439, loss: 0.0074, instance_loss: 0.0056, weighted_loss: 0.0068, label: 2, bag_size: 50978\n",
      "batch 459, loss: 0.1992, instance_loss: 0.0003, weighted_loss: 0.1395, label: 1, bag_size: 49548\n",
      "batch 479, loss: 0.0004, instance_loss: 0.0014, weighted_loss: 0.0007, label: 0, bag_size: 64555\n",
      "batch 499, loss: 0.0044, instance_loss: 0.0057, weighted_loss: 0.0048, label: 2, bag_size: 68187\n",
      "batch 519, loss: 0.6515, instance_loss: 0.0021, weighted_loss: 0.4567, label: 0, bag_size: 68879\n",
      "batch 539, loss: 0.0790, instance_loss: 0.0211, weighted_loss: 0.0617, label: 1, bag_size: 47660\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.999771897810219: correct 4383/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 7, train_loss: 0.2101, train_clustering_loss:  0.0069, train_error: 0.0730\n",
      "class 0: acc 0.8850574712643678, correct 154/174\n",
      "class 1: acc 0.9270833333333334, correct 178/192\n",
      "class 2: acc 0.967032967032967, correct 176/182\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3896, val_error: 0.0984, auc: 0.9653\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9523809523809523, correct 20/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 5 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0457, instance_loss: 0.0077, weighted_loss: 0.0343, label: 2, bag_size: 54265\n",
      "batch 39, loss: 0.7281, instance_loss: 0.0013, weighted_loss: 0.5101, label: 0, bag_size: 66469\n",
      "batch 59, loss: 0.0106, instance_loss: 0.0004, weighted_loss: 0.0075, label: 1, bag_size: 49463\n",
      "batch 79, loss: 0.9369, instance_loss: 0.0015, weighted_loss: 0.6563, label: 2, bag_size: 51554\n",
      "batch 99, loss: 0.0024, instance_loss: 0.0008, weighted_loss: 0.0019, label: 0, bag_size: 41071\n",
      "batch 119, loss: 0.0195, instance_loss: 0.0220, weighted_loss: 0.0202, label: 2, bag_size: 66801\n",
      "batch 139, loss: 0.1891, instance_loss: 0.0138, weighted_loss: 0.1365, label: 0, bag_size: 31879\n",
      "batch 159, loss: 0.2166, instance_loss: 0.0033, weighted_loss: 0.1526, label: 0, bag_size: 87503\n",
      "batch 179, loss: 0.9443, instance_loss: 0.0024, weighted_loss: 0.6617, label: 0, bag_size: 56229\n",
      "batch 199, loss: 0.3202, instance_loss: 0.0013, weighted_loss: 0.2245, label: 0, bag_size: 74863\n",
      "batch 219, loss: 0.0512, instance_loss: 0.0073, weighted_loss: 0.0381, label: 0, bag_size: 80774\n",
      "batch 239, loss: 0.0771, instance_loss: 0.0030, weighted_loss: 0.0548, label: 2, bag_size: 68302\n",
      "batch 259, loss: 0.2327, instance_loss: 0.1519, weighted_loss: 0.2084, label: 2, bag_size: 40315\n",
      "batch 279, loss: 0.3551, instance_loss: 0.0022, weighted_loss: 0.2492, label: 1, bag_size: 49139\n",
      "batch 299, loss: 0.0658, instance_loss: 0.0016, weighted_loss: 0.0466, label: 0, bag_size: 91903\n",
      "batch 319, loss: 0.5596, instance_loss: 0.0365, weighted_loss: 0.4027, label: 0, bag_size: 38066\n",
      "batch 339, loss: 0.3793, instance_loss: 0.0004, weighted_loss: 0.2656, label: 1, bag_size: 49548\n",
      "batch 359, loss: 0.0069, instance_loss: 0.0047, weighted_loss: 0.0062, label: 1, bag_size: 23894\n",
      "batch 379, loss: 0.0014, instance_loss: 0.0025, weighted_loss: 0.0017, label: 2, bag_size: 46661\n",
      "batch 399, loss: 0.0016, instance_loss: 0.0031, weighted_loss: 0.0021, label: 0, bag_size: 65462\n",
      "batch 419, loss: 0.0014, instance_loss: 0.0011, weighted_loss: 0.0013, label: 2, bag_size: 72966\n",
      "batch 439, loss: 2.1253, instance_loss: 0.0034, weighted_loss: 1.4888, label: 0, bag_size: 66469\n",
      "batch 459, loss: 0.1346, instance_loss: 0.0054, weighted_loss: 0.0958, label: 2, bag_size: 55626\n",
      "batch 479, loss: 0.0024, instance_loss: 0.0031, weighted_loss: 0.0026, label: 2, bag_size: 64488\n",
      "batch 499, loss: 0.0134, instance_loss: 0.0027, weighted_loss: 0.0102, label: 2, bag_size: 55626\n",
      "batch 519, loss: 1.0743, instance_loss: 0.0037, weighted_loss: 0.7532, label: 0, bag_size: 53401\n",
      "batch 539, loss: 0.0072, instance_loss: 0.0005, weighted_loss: 0.0052, label: 0, bag_size: 79972\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9990875912408759: correct 4380/4384\n",
      "Epoch: 8, train_loss: 0.1804, train_clustering_loss:  0.0086, train_error: 0.0712\n",
      "class 0: acc 0.912568306010929, correct 167/183\n",
      "class 1: acc 0.9175257731958762, correct 178/194\n",
      "class 2: acc 0.9590643274853801, correct 164/171\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3355, val_error: 0.1148, auc: 0.9755\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "Validation loss decreased (0.973629 --> 0.975474).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0009, instance_loss: 0.0021, weighted_loss: 0.0013, label: 0, bag_size: 16262\n",
      "batch 39, loss: 0.4634, instance_loss: 0.0060, weighted_loss: 0.3262, label: 0, bag_size: 56311\n",
      "batch 59, loss: 0.0027, instance_loss: 0.0020, weighted_loss: 0.0025, label: 0, bag_size: 53188\n",
      "batch 79, loss: 0.0041, instance_loss: 0.0049, weighted_loss: 0.0044, label: 0, bag_size: 78549\n",
      "batch 99, loss: 0.1973, instance_loss: 0.0052, weighted_loss: 0.1397, label: 2, bag_size: 51554\n",
      "batch 119, loss: 0.0012, instance_loss: 0.0204, weighted_loss: 0.0070, label: 2, bag_size: 84436\n",
      "batch 139, loss: 0.0543, instance_loss: 0.0020, weighted_loss: 0.0386, label: 2, bag_size: 51017\n",
      "batch 159, loss: 0.0105, instance_loss: 0.0077, weighted_loss: 0.0096, label: 0, bag_size: 14481\n",
      "batch 179, loss: 0.0355, instance_loss: 0.0081, weighted_loss: 0.0273, label: 0, bag_size: 30263\n",
      "batch 199, loss: 0.0108, instance_loss: 0.0168, weighted_loss: 0.0126, label: 2, bag_size: 76037\n",
      "batch 219, loss: 0.5131, instance_loss: 0.0021, weighted_loss: 0.3598, label: 1, bag_size: 35236\n",
      "batch 239, loss: 0.5866, instance_loss: 0.0078, weighted_loss: 0.4129, label: 2, bag_size: 65317\n",
      "batch 259, loss: 0.0062, instance_loss: 0.0000, weighted_loss: 0.0043, label: 2, bag_size: 103823\n",
      "batch 279, loss: 0.0156, instance_loss: 0.0009, weighted_loss: 0.0112, label: 1, bag_size: 86381\n",
      "batch 299, loss: 0.0135, instance_loss: 0.0150, weighted_loss: 0.0140, label: 2, bag_size: 46661\n",
      "batch 319, loss: 0.0227, instance_loss: 0.0169, weighted_loss: 0.0210, label: 1, bag_size: 39620\n",
      "batch 339, loss: 0.0008, instance_loss: 0.0024, weighted_loss: 0.0013, label: 2, bag_size: 69080\n",
      "batch 359, loss: 0.0050, instance_loss: 0.0000, weighted_loss: 0.0035, label: 0, bag_size: 57545\n",
      "batch 379, loss: 1.7840, instance_loss: 0.0017, weighted_loss: 1.2493, label: 0, bag_size: 51028\n",
      "batch 399, loss: 0.0002, instance_loss: 0.0003, weighted_loss: 0.0002, label: 0, bag_size: 9321\n",
      "batch 419, loss: 0.2365, instance_loss: 0.0011, weighted_loss: 0.1659, label: 1, bag_size: 35236\n",
      "batch 439, loss: 0.1193, instance_loss: 0.0072, weighted_loss: 0.0857, label: 1, bag_size: 18525\n",
      "batch 459, loss: 0.0020, instance_loss: 0.0011, weighted_loss: 0.0017, label: 2, bag_size: 54040\n",
      "batch 479, loss: 0.1395, instance_loss: 0.0067, weighted_loss: 0.0997, label: 0, bag_size: 30015\n",
      "batch 499, loss: 0.2008, instance_loss: 0.0046, weighted_loss: 0.1420, label: 1, bag_size: 66144\n",
      "batch 519, loss: 0.0096, instance_loss: 0.0008, weighted_loss: 0.0070, label: 1, bag_size: 46625\n",
      "batch 539, loss: 0.0124, instance_loss: 0.0015, weighted_loss: 0.0091, label: 0, bag_size: 59072\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 9, train_loss: 0.1498, train_clustering_loss:  0.0079, train_error: 0.0493\n",
      "class 0: acc 0.9375, correct 180/192\n",
      "class 1: acc 0.9222222222222223, correct 166/180\n",
      "class 2: acc 0.9943181818181818, correct 175/176\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4077, val_error: 0.0984, auc: 0.9729\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9523809523809523, correct 20/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0007, instance_loss: 0.0120, weighted_loss: 0.0041, label: 1, bag_size: 74870\n",
      "batch 39, loss: 0.0040, instance_loss: 0.0128, weighted_loss: 0.0066, label: 1, bag_size: 7532\n",
      "batch 59, loss: 0.4349, instance_loss: 0.0050, weighted_loss: 0.3060, label: 1, bag_size: 42983\n",
      "batch 79, loss: 0.0135, instance_loss: 0.0057, weighted_loss: 0.0112, label: 1, bag_size: 81168\n",
      "batch 99, loss: 0.0836, instance_loss: 0.0107, weighted_loss: 0.0617, label: 2, bag_size: 34408\n",
      "batch 119, loss: 0.1464, instance_loss: 0.0058, weighted_loss: 0.1042, label: 1, bag_size: 41704\n",
      "batch 139, loss: 0.0002, instance_loss: 0.0032, weighted_loss: 0.0011, label: 2, bag_size: 79373\n",
      "batch 159, loss: 0.0668, instance_loss: 0.0026, weighted_loss: 0.0476, label: 1, bag_size: 35236\n",
      "batch 179, loss: 0.7767, instance_loss: 0.0015, weighted_loss: 0.5441, label: 2, bag_size: 51554\n",
      "batch 199, loss: 1.8861, instance_loss: 0.0034, weighted_loss: 1.3213, label: 1, bag_size: 72833\n",
      "batch 219, loss: 0.0001, instance_loss: 0.0017, weighted_loss: 0.0006, label: 0, bag_size: 31339\n",
      "batch 239, loss: 0.0783, instance_loss: 0.0004, weighted_loss: 0.0549, label: 2, bag_size: 87135\n",
      "batch 259, loss: 0.0051, instance_loss: 0.0007, weighted_loss: 0.0038, label: 0, bag_size: 92719\n",
      "batch 279, loss: 0.0001, instance_loss: 0.0018, weighted_loss: 0.0006, label: 1, bag_size: 54122\n",
      "batch 299, loss: 0.0215, instance_loss: 0.0027, weighted_loss: 0.0158, label: 0, bag_size: 108807\n",
      "batch 319, loss: 0.0104, instance_loss: 0.0078, weighted_loss: 0.0096, label: 0, bag_size: 80774\n",
      "batch 339, loss: 0.0001, instance_loss: 0.0046, weighted_loss: 0.0015, label: 2, bag_size: 68187\n",
      "batch 359, loss: 0.0000, instance_loss: 0.0003, weighted_loss: 0.0001, label: 1, bag_size: 59576\n",
      "batch 379, loss: 1.6379, instance_loss: 0.0095, weighted_loss: 1.1493, label: 1, bag_size: 81168\n",
      "batch 399, loss: 0.0014, instance_loss: 0.0036, weighted_loss: 0.0021, label: 1, bag_size: 16596\n",
      "batch 419, loss: 0.0024, instance_loss: 0.0039, weighted_loss: 0.0029, label: 1, bag_size: 57548\n",
      "batch 439, loss: 0.0053, instance_loss: 0.0014, weighted_loss: 0.0041, label: 0, bag_size: 33045\n",
      "batch 459, loss: 0.0001, instance_loss: 0.0044, weighted_loss: 0.0014, label: 0, bag_size: 11462\n",
      "batch 479, loss: 0.0062, instance_loss: 0.0032, weighted_loss: 0.0053, label: 1, bag_size: 13487\n",
      "batch 499, loss: 0.0007, instance_loss: 0.0017, weighted_loss: 0.0010, label: 0, bag_size: 73748\n",
      "batch 519, loss: 0.0006, instance_loss: 0.0036, weighted_loss: 0.0015, label: 2, bag_size: 50045\n",
      "batch 539, loss: 0.0007, instance_loss: 0.0010, weighted_loss: 0.0008, label: 0, bag_size: 70900\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 10, train_loss: 0.1572, train_clustering_loss:  0.0080, train_error: 0.0639\n",
      "class 0: acc 0.8953488372093024, correct 154/172\n",
      "class 1: acc 0.9312169312169312, correct 176/189\n",
      "class 2: acc 0.9786096256684492, correct 183/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3452, val_error: 0.0984, auc: 0.9718\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8928571428571429, correct 25/28\n",
      "class 1: acc 0.8571428571428571, correct 18/21\n",
      "class 2: acc 1.0, correct 12/12\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0031, instance_loss: 0.0032, weighted_loss: 0.0031, label: 2, bag_size: 38471\n",
      "batch 39, loss: 0.0019, instance_loss: 0.0021, weighted_loss: 0.0019, label: 0, bag_size: 94524\n",
      "batch 59, loss: 0.2261, instance_loss: 0.0163, weighted_loss: 0.1632, label: 2, bag_size: 12043\n",
      "batch 79, loss: 0.0005, instance_loss: 0.0012, weighted_loss: 0.0007, label: 0, bag_size: 89352\n",
      "batch 99, loss: 0.1762, instance_loss: 0.0473, weighted_loss: 0.1375, label: 1, bag_size: 5928\n",
      "batch 119, loss: 0.1082, instance_loss: 0.0158, weighted_loss: 0.0805, label: 2, bag_size: 58111\n",
      "batch 139, loss: 0.1765, instance_loss: 0.0242, weighted_loss: 0.1308, label: 2, bag_size: 11415\n",
      "batch 159, loss: 0.3042, instance_loss: 0.0042, weighted_loss: 0.2142, label: 0, bag_size: 63845\n",
      "batch 179, loss: 0.0013, instance_loss: 0.0009, weighted_loss: 0.0012, label: 0, bag_size: 71332\n",
      "batch 199, loss: 0.0040, instance_loss: 0.0027, weighted_loss: 0.0036, label: 2, bag_size: 66023\n",
      "batch 219, loss: 0.3568, instance_loss: 0.0059, weighted_loss: 0.2515, label: 2, bag_size: 58111\n",
      "batch 239, loss: 0.0737, instance_loss: 0.0036, weighted_loss: 0.0526, label: 2, bag_size: 64675\n",
      "batch 259, loss: 0.0884, instance_loss: 0.0000, weighted_loss: 0.0619, label: 1, bag_size: 75744\n",
      "batch 279, loss: 0.0007, instance_loss: 0.0197, weighted_loss: 0.0064, label: 2, bag_size: 48593\n",
      "batch 299, loss: 0.0284, instance_loss: 0.0031, weighted_loss: 0.0209, label: 1, bag_size: 41704\n",
      "batch 319, loss: 0.5280, instance_loss: 0.0005, weighted_loss: 0.3697, label: 1, bag_size: 52176\n",
      "batch 339, loss: 0.0026, instance_loss: 0.0037, weighted_loss: 0.0029, label: 0, bag_size: 58267\n",
      "batch 359, loss: 0.0001, instance_loss: 0.0026, weighted_loss: 0.0009, label: 0, bag_size: 16262\n",
      "batch 379, loss: 0.0333, instance_loss: 0.0007, weighted_loss: 0.0235, label: 1, bag_size: 32084\n",
      "batch 399, loss: 0.0156, instance_loss: 0.0002, weighted_loss: 0.0110, label: 1, bag_size: 33537\n",
      "batch 419, loss: 0.0002, instance_loss: 0.0003, weighted_loss: 0.0002, label: 0, bag_size: 65462\n",
      "batch 439, loss: 0.0012, instance_loss: 0.0025, weighted_loss: 0.0016, label: 1, bag_size: 53211\n",
      "batch 459, loss: 0.0889, instance_loss: 0.0007, weighted_loss: 0.0625, label: 2, bag_size: 87135\n",
      "batch 479, loss: 0.0751, instance_loss: 0.0022, weighted_loss: 0.0533, label: 1, bag_size: 73500\n",
      "batch 499, loss: 0.0028, instance_loss: 0.0016, weighted_loss: 0.0024, label: 1, bag_size: 62739\n",
      "batch 519, loss: 0.0058, instance_loss: 0.0006, weighted_loss: 0.0043, label: 1, bag_size: 55270\n",
      "batch 539, loss: 0.1089, instance_loss: 0.0033, weighted_loss: 0.0772, label: 2, bag_size: 10243\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 11, train_loss: 0.1679, train_clustering_loss:  0.0065, train_error: 0.0547\n",
      "class 0: acc 0.9075144508670521, correct 157/173\n",
      "class 1: acc 0.9351351351351351, correct 173/185\n",
      "class 2: acc 0.9894736842105263, correct 188/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3263, val_error: 0.1148, auc: 0.9758\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "Validation loss decreased (0.975474 --> 0.975783).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0247, instance_loss: 0.0006, weighted_loss: 0.0175, label: 0, bag_size: 104395\n",
      "batch 39, loss: 0.0008, instance_loss: 0.0039, weighted_loss: 0.0018, label: 0, bag_size: 58844\n",
      "batch 59, loss: 0.0016, instance_loss: 0.0005, weighted_loss: 0.0013, label: 0, bag_size: 55983\n",
      "batch 79, loss: 0.0002, instance_loss: 0.0066, weighted_loss: 0.0021, label: 1, bag_size: 11509\n",
      "batch 99, loss: 0.0592, instance_loss: 0.0016, weighted_loss: 0.0419, label: 2, bag_size: 54265\n",
      "batch 119, loss: 0.0352, instance_loss: 0.0008, weighted_loss: 0.0249, label: 2, bag_size: 91267\n",
      "batch 139, loss: 0.6357, instance_loss: 0.0040, weighted_loss: 0.4462, label: 0, bag_size: 80619\n",
      "batch 159, loss: 0.5556, instance_loss: 0.0095, weighted_loss: 0.3918, label: 0, bag_size: 13539\n",
      "batch 179, loss: 0.0639, instance_loss: 0.0014, weighted_loss: 0.0452, label: 2, bag_size: 38471\n",
      "batch 199, loss: 0.0050, instance_loss: 0.0009, weighted_loss: 0.0037, label: 0, bag_size: 61317\n",
      "batch 219, loss: 0.0016, instance_loss: 0.0023, weighted_loss: 0.0018, label: 2, bag_size: 51251\n",
      "batch 239, loss: 0.0014, instance_loss: 0.0105, weighted_loss: 0.0042, label: 1, bag_size: 23894\n",
      "batch 259, loss: 0.0570, instance_loss: 0.0017, weighted_loss: 0.0404, label: 0, bag_size: 23506\n",
      "batch 279, loss: 0.3767, instance_loss: 0.0114, weighted_loss: 0.2671, label: 2, bag_size: 23841\n",
      "batch 299, loss: 0.0005, instance_loss: 0.0056, weighted_loss: 0.0020, label: 0, bag_size: 26331\n",
      "batch 319, loss: 0.4056, instance_loss: 0.0002, weighted_loss: 0.2840, label: 1, bag_size: 92502\n",
      "batch 339, loss: 0.0009, instance_loss: 0.0054, weighted_loss: 0.0022, label: 2, bag_size: 66345\n",
      "batch 359, loss: 0.0119, instance_loss: 0.0005, weighted_loss: 0.0085, label: 0, bag_size: 48589\n",
      "batch 379, loss: 0.0010, instance_loss: 0.0005, weighted_loss: 0.0008, label: 1, bag_size: 55270\n",
      "batch 399, loss: 0.1300, instance_loss: 0.0009, weighted_loss: 0.0913, label: 1, bag_size: 29399\n",
      "batch 419, loss: 0.0023, instance_loss: 0.0027, weighted_loss: 0.0024, label: 2, bag_size: 68187\n",
      "batch 439, loss: 0.1627, instance_loss: 0.0053, weighted_loss: 0.1155, label: 0, bag_size: 29966\n",
      "batch 459, loss: 0.0013, instance_loss: 0.0003, weighted_loss: 0.0010, label: 1, bag_size: 80573\n",
      "batch 479, loss: 0.0040, instance_loss: 0.0006, weighted_loss: 0.0030, label: 2, bag_size: 69002\n",
      "batch 499, loss: 0.2791, instance_loss: 0.0113, weighted_loss: 0.1987, label: 2, bag_size: 78955\n",
      "batch 519, loss: 0.0160, instance_loss: 0.0036, weighted_loss: 0.0123, label: 0, bag_size: 59830\n",
      "batch 539, loss: 0.0063, instance_loss: 0.0152, weighted_loss: 0.0089, label: 1, bag_size: 17958\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 12, train_loss: 0.1077, train_clustering_loss:  0.0061, train_error: 0.0219\n",
      "class 0: acc 0.9562841530054644, correct 175/183\n",
      "class 1: acc 0.9792746113989638, correct 189/193\n",
      "class 2: acc 1.0, correct 172/172\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4551, val_error: 0.1803, auc: 0.9673\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.6666666666666666, correct 14/21\n",
      "class 2: acc 1.0, correct 12/12\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0016, instance_loss: 0.0021, weighted_loss: 0.0018, label: 1, bag_size: 15902\n",
      "batch 39, loss: 0.0001, instance_loss: 0.0008, weighted_loss: 0.0003, label: 0, bag_size: 64092\n",
      "batch 59, loss: 0.1783, instance_loss: 0.0889, weighted_loss: 0.1515, label: 0, bag_size: 84026\n",
      "batch 79, loss: 2.8430, instance_loss: 0.0035, weighted_loss: 1.9912, label: 0, bag_size: 51028\n",
      "batch 99, loss: 0.2159, instance_loss: 0.0009, weighted_loss: 0.1514, label: 0, bag_size: 90782\n",
      "batch 119, loss: 0.3644, instance_loss: 0.0047, weighted_loss: 0.2565, label: 2, bag_size: 58111\n",
      "batch 139, loss: 4.6970, instance_loss: 0.0141, weighted_loss: 3.2921, label: 0, bag_size: 21504\n",
      "batch 159, loss: 0.0010, instance_loss: 0.0003, weighted_loss: 0.0008, label: 0, bag_size: 53040\n",
      "batch 179, loss: 0.0222, instance_loss: 0.0049, weighted_loss: 0.0170, label: 2, bag_size: 50978\n",
      "batch 199, loss: 0.0001, instance_loss: 0.0002, weighted_loss: 0.0001, label: 0, bag_size: 75790\n",
      "batch 219, loss: 0.0002, instance_loss: 0.0007, weighted_loss: 0.0004, label: 0, bag_size: 77275\n",
      "batch 239, loss: 0.6751, instance_loss: 0.0082, weighted_loss: 0.4750, label: 2, bag_size: 23841\n",
      "batch 259, loss: 0.0046, instance_loss: 0.0069, weighted_loss: 0.0053, label: 1, bag_size: 44591\n",
      "batch 279, loss: 0.0016, instance_loss: 0.0006, weighted_loss: 0.0013, label: 1, bag_size: 66357\n",
      "batch 299, loss: 0.0009, instance_loss: 0.0221, weighted_loss: 0.0073, label: 2, bag_size: 84436\n",
      "batch 319, loss: 0.0098, instance_loss: 0.0026, weighted_loss: 0.0077, label: 1, bag_size: 10520\n",
      "batch 339, loss: 0.0108, instance_loss: 0.0082, weighted_loss: 0.0100, label: 2, bag_size: 39234\n",
      "batch 359, loss: 0.0008, instance_loss: 0.0000, weighted_loss: 0.0006, label: 1, bag_size: 57799\n",
      "batch 379, loss: 0.0002, instance_loss: 0.0038, weighted_loss: 0.0013, label: 0, bag_size: 73829\n",
      "batch 399, loss: 0.0305, instance_loss: 0.0042, weighted_loss: 0.0226, label: 2, bag_size: 50978\n",
      "batch 419, loss: 0.0183, instance_loss: 0.0098, weighted_loss: 0.0157, label: 2, bag_size: 76843\n",
      "batch 439, loss: 0.1540, instance_loss: 0.0000, weighted_loss: 0.1078, label: 1, bag_size: 54288\n",
      "batch 459, loss: 0.0065, instance_loss: 0.0205, weighted_loss: 0.0107, label: 2, bag_size: 75833\n",
      "batch 479, loss: 0.0008, instance_loss: 0.0004, weighted_loss: 0.0006, label: 0, bag_size: 79446\n",
      "batch 499, loss: 0.0134, instance_loss: 0.0127, weighted_loss: 0.0132, label: 0, bag_size: 15052\n",
      "batch 519, loss: 0.0070, instance_loss: 0.0045, weighted_loss: 0.0063, label: 0, bag_size: 26032\n",
      "batch 539, loss: 0.0002, instance_loss: 0.0046, weighted_loss: 0.0015, label: 1, bag_size: 46798\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 13, train_loss: 0.1412, train_clustering_loss:  0.0070, train_error: 0.0456\n",
      "class 0: acc 0.9368421052631579, correct 178/190\n",
      "class 1: acc 0.9427083333333334, correct 181/192\n",
      "class 2: acc 0.9879518072289156, correct 164/166\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5102, val_error: 0.1311, auc: 0.9696\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.75, correct 21/28\n",
      "class 1: acc 1.0, correct 21/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.9265, instance_loss: 0.0159, weighted_loss: 0.6534, label: 0, bag_size: 28029\n",
      "batch 39, loss: 0.0210, instance_loss: 0.0006, weighted_loss: 0.0149, label: 2, bag_size: 59224\n",
      "batch 59, loss: 0.0130, instance_loss: 0.0011, weighted_loss: 0.0094, label: 0, bag_size: 18166\n",
      "batch 79, loss: 0.0011, instance_loss: 0.0054, weighted_loss: 0.0024, label: 2, bag_size: 68187\n",
      "batch 99, loss: 0.0244, instance_loss: 0.0018, weighted_loss: 0.0177, label: 2, bag_size: 91267\n",
      "batch 119, loss: 0.0297, instance_loss: 0.0035, weighted_loss: 0.0218, label: 2, bag_size: 28252\n",
      "batch 139, loss: 0.1168, instance_loss: 0.0185, weighted_loss: 0.0873, label: 2, bag_size: 76843\n",
      "batch 159, loss: 0.9429, instance_loss: 0.0041, weighted_loss: 0.6613, label: 1, bag_size: 87674\n",
      "batch 179, loss: 0.0526, instance_loss: 0.0162, weighted_loss: 0.0417, label: 1, bag_size: 14553\n",
      "batch 199, loss: 0.0197, instance_loss: 0.0006, weighted_loss: 0.0140, label: 1, bag_size: 49445\n",
      "batch 219, loss: 0.1262, instance_loss: 0.0038, weighted_loss: 0.0895, label: 2, bag_size: 15486\n",
      "batch 239, loss: 0.0130, instance_loss: 0.0081, weighted_loss: 0.0115, label: 1, bag_size: 60502\n",
      "batch 259, loss: 0.8195, instance_loss: 0.0031, weighted_loss: 0.5746, label: 2, bag_size: 64675\n",
      "batch 279, loss: 0.0006, instance_loss: 0.0015, weighted_loss: 0.0009, label: 2, bag_size: 79690\n",
      "batch 299, loss: 0.0050, instance_loss: 0.0027, weighted_loss: 0.0043, label: 2, bag_size: 82484\n",
      "batch 319, loss: 0.0038, instance_loss: 0.0056, weighted_loss: 0.0043, label: 1, bag_size: 81168\n",
      "batch 339, loss: 0.0851, instance_loss: 0.0011, weighted_loss: 0.0599, label: 0, bag_size: 87979\n",
      "batch 359, loss: 0.0107, instance_loss: 0.0031, weighted_loss: 0.0084, label: 0, bag_size: 74527\n",
      "batch 379, loss: 0.0002, instance_loss: 0.0065, weighted_loss: 0.0020, label: 2, bag_size: 75833\n",
      "batch 399, loss: 0.1040, instance_loss: 0.0049, weighted_loss: 0.0743, label: 2, bag_size: 15486\n",
      "batch 419, loss: 0.1853, instance_loss: 0.0039, weighted_loss: 0.1308, label: 2, bag_size: 87135\n",
      "batch 439, loss: 0.4153, instance_loss: 0.0047, weighted_loss: 0.2921, label: 0, bag_size: 63775\n",
      "batch 459, loss: 0.0000, instance_loss: 0.0022, weighted_loss: 0.0007, label: 0, bag_size: 67238\n",
      "batch 479, loss: 0.1993, instance_loss: 0.0138, weighted_loss: 0.1436, label: 1, bag_size: 51926\n",
      "batch 499, loss: 0.0009, instance_loss: 0.0069, weighted_loss: 0.0027, label: 1, bag_size: 92385\n",
      "batch 519, loss: 0.0055, instance_loss: 0.0388, weighted_loss: 0.0155, label: 0, bag_size: 19587\n",
      "batch 539, loss: 0.0335, instance_loss: 0.0495, weighted_loss: 0.0383, label: 2, bag_size: 11119\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 14, train_loss: 0.1476, train_clustering_loss:  0.0055, train_error: 0.0438\n",
      "class 0: acc 0.9166666666666666, correct 154/168\n",
      "class 1: acc 0.9567567567567568, correct 177/185\n",
      "class 2: acc 0.9897435897435898, correct 193/195\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3720, val_error: 0.1148, auc: 0.9683\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8928571428571429, correct 25/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.8333333333333334, correct 10/12\n",
      "EarlyStopping counter: 3 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.2146, instance_loss: 0.0094, weighted_loss: 0.1531, label: 2, bag_size: 15486\n",
      "batch 39, loss: 0.0280, instance_loss: 0.0034, weighted_loss: 0.0206, label: 2, bag_size: 52584\n",
      "batch 59, loss: 0.2152, instance_loss: 0.0146, weighted_loss: 0.1550, label: 2, bag_size: 34408\n",
      "batch 79, loss: 0.0020, instance_loss: 0.0104, weighted_loss: 0.0045, label: 0, bag_size: 66702\n",
      "batch 99, loss: 0.0031, instance_loss: 0.0040, weighted_loss: 0.0034, label: 1, bag_size: 113186\n",
      "batch 119, loss: 0.0041, instance_loss: 0.0033, weighted_loss: 0.0038, label: 2, bag_size: 38471\n",
      "batch 139, loss: 0.0792, instance_loss: 0.0099, weighted_loss: 0.0584, label: 2, bag_size: 12043\n",
      "batch 159, loss: 0.0161, instance_loss: 0.0080, weighted_loss: 0.0137, label: 1, bag_size: 10151\n",
      "batch 179, loss: 0.0226, instance_loss: 0.0059, weighted_loss: 0.0176, label: 2, bag_size: 11415\n",
      "batch 199, loss: 0.0390, instance_loss: 0.0059, weighted_loss: 0.0291, label: 2, bag_size: 68302\n",
      "batch 219, loss: 0.0022, instance_loss: 0.0069, weighted_loss: 0.0036, label: 2, bag_size: 84436\n",
      "batch 239, loss: 0.0017, instance_loss: 0.0033, weighted_loss: 0.0022, label: 1, bag_size: 113716\n",
      "batch 259, loss: 0.0017, instance_loss: 0.0039, weighted_loss: 0.0023, label: 1, bag_size: 8990\n",
      "batch 279, loss: 0.0018, instance_loss: 0.0028, weighted_loss: 0.0021, label: 2, bag_size: 48881\n",
      "batch 299, loss: 0.0069, instance_loss: 0.0060, weighted_loss: 0.0066, label: 2, bag_size: 55626\n",
      "batch 319, loss: 0.0611, instance_loss: 0.0022, weighted_loss: 0.0434, label: 2, bag_size: 58111\n",
      "batch 339, loss: 0.0878, instance_loss: 0.0045, weighted_loss: 0.0628, label: 1, bag_size: 10151\n",
      "batch 359, loss: 0.0110, instance_loss: 0.0048, weighted_loss: 0.0092, label: 1, bag_size: 74870\n",
      "batch 379, loss: 0.0268, instance_loss: 0.0027, weighted_loss: 0.0196, label: 0, bag_size: 53043\n",
      "batch 399, loss: 5.5290, instance_loss: 0.0000, weighted_loss: 3.8703, label: 1, bag_size: 48985\n",
      "batch 419, loss: 0.0268, instance_loss: 0.0020, weighted_loss: 0.0193, label: 2, bag_size: 66023\n",
      "batch 439, loss: 0.1867, instance_loss: 0.0045, weighted_loss: 0.1320, label: 1, bag_size: 20115\n",
      "batch 459, loss: 0.0414, instance_loss: 0.0000, weighted_loss: 0.0290, label: 1, bag_size: 108492\n",
      "batch 479, loss: 0.0116, instance_loss: 0.0016, weighted_loss: 0.0086, label: 1, bag_size: 24044\n",
      "batch 499, loss: 0.0026, instance_loss: 0.0002, weighted_loss: 0.0019, label: 1, bag_size: 77740\n",
      "batch 519, loss: 0.7499, instance_loss: 0.0041, weighted_loss: 0.5262, label: 2, bag_size: 23841\n",
      "batch 539, loss: 0.0012, instance_loss: 0.0043, weighted_loss: 0.0022, label: 2, bag_size: 19057\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 15, train_loss: 0.1139, train_clustering_loss:  0.0054, train_error: 0.0401\n",
      "class 0: acc 0.9337349397590361, correct 155/166\n",
      "class 1: acc 0.9427083333333334, correct 181/192\n",
      "class 2: acc 1.0, correct 190/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5109, val_error: 0.0820, auc: 0.9710\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 1.0, correct 21/21\n",
      "class 2: acc 1.0, correct 12/12\n",
      "EarlyStopping counter: 4 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0002, instance_loss: 0.0000, weighted_loss: 0.0001, label: 1, bag_size: 57077\n",
      "batch 39, loss: 0.0022, instance_loss: 0.0042, weighted_loss: 0.0028, label: 0, bag_size: 68286\n",
      "batch 59, loss: 0.8293, instance_loss: 0.0023, weighted_loss: 0.5812, label: 0, bag_size: 50370\n",
      "batch 79, loss: 0.0070, instance_loss: 0.0037, weighted_loss: 0.0060, label: 0, bag_size: 22371\n",
      "batch 99, loss: 0.1102, instance_loss: 0.0034, weighted_loss: 0.0782, label: 2, bag_size: 51316\n",
      "batch 119, loss: 0.0217, instance_loss: 0.0055, weighted_loss: 0.0169, label: 2, bag_size: 82484\n",
      "batch 139, loss: 0.5380, instance_loss: 0.0071, weighted_loss: 0.3787, label: 2, bag_size: 19057\n",
      "batch 159, loss: 0.0351, instance_loss: 0.0032, weighted_loss: 0.0256, label: 1, bag_size: 56911\n",
      "batch 179, loss: 0.2613, instance_loss: 0.0007, weighted_loss: 0.1831, label: 2, bag_size: 51554\n",
      "batch 199, loss: 0.7856, instance_loss: 0.0016, weighted_loss: 0.5504, label: 0, bag_size: 90782\n",
      "batch 219, loss: 0.0005, instance_loss: 0.0035, weighted_loss: 0.0014, label: 2, bag_size: 95428\n",
      "batch 239, loss: 0.0030, instance_loss: 0.0096, weighted_loss: 0.0049, label: 2, bag_size: 84436\n",
      "batch 259, loss: 0.0001, instance_loss: 0.0097, weighted_loss: 0.0030, label: 0, bag_size: 11462\n",
      "batch 279, loss: 0.0041, instance_loss: 0.0036, weighted_loss: 0.0040, label: 2, bag_size: 63921\n",
      "batch 299, loss: 0.0005, instance_loss: 0.0137, weighted_loss: 0.0044, label: 2, bag_size: 76037\n",
      "batch 319, loss: 0.0316, instance_loss: 0.0027, weighted_loss: 0.0229, label: 2, bag_size: 64675\n",
      "batch 339, loss: 0.0010, instance_loss: 0.0052, weighted_loss: 0.0023, label: 2, bag_size: 63921\n",
      "batch 359, loss: 0.0021, instance_loss: 0.0021, weighted_loss: 0.0021, label: 1, bag_size: 70867\n",
      "batch 379, loss: 0.0008, instance_loss: 0.0039, weighted_loss: 0.0018, label: 1, bag_size: 23894\n",
      "batch 399, loss: 0.0165, instance_loss: 0.0015, weighted_loss: 0.0120, label: 0, bag_size: 38922\n",
      "batch 419, loss: 0.1367, instance_loss: 0.0027, weighted_loss: 0.0965, label: 1, bag_size: 36784\n",
      "batch 439, loss: 0.0737, instance_loss: 0.0000, weighted_loss: 0.0516, label: 1, bag_size: 105100\n",
      "batch 459, loss: 0.0002, instance_loss: 0.0061, weighted_loss: 0.0020, label: 1, bag_size: 74927\n",
      "batch 479, loss: 0.0027, instance_loss: 0.0028, weighted_loss: 0.0027, label: 2, bag_size: 69080\n",
      "batch 499, loss: 0.0019, instance_loss: 0.0067, weighted_loss: 0.0034, label: 0, bag_size: 78549\n",
      "batch 519, loss: 0.4690, instance_loss: 0.0023, weighted_loss: 0.3290, label: 0, bag_size: 52270\n",
      "batch 539, loss: 0.0003, instance_loss: 0.0000, weighted_loss: 0.0002, label: 0, bag_size: 75202\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 16, train_loss: 0.1124, train_clustering_loss:  0.0059, train_error: 0.0474\n",
      "class 0: acc 0.9433962264150944, correct 200/212\n",
      "class 1: acc 0.9230769230769231, correct 156/169\n",
      "class 2: acc 0.9940119760479041, correct 166/167\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4480, val_error: 0.1148, auc: 0.9613\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 5 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0162, instance_loss: 0.0074, weighted_loss: 0.0135, label: 1, bag_size: 172802\n",
      "batch 39, loss: 0.5453, instance_loss: 0.0016, weighted_loss: 0.3822, label: 0, bag_size: 43682\n",
      "batch 59, loss: 0.0238, instance_loss: 0.0041, weighted_loss: 0.0179, label: 0, bag_size: 19035\n",
      "batch 79, loss: 0.6397, instance_loss: 0.0048, weighted_loss: 0.4492, label: 0, bag_size: 67991\n",
      "batch 99, loss: 0.0035, instance_loss: 0.0026, weighted_loss: 0.0032, label: 1, bag_size: 66357\n",
      "batch 119, loss: 0.0006, instance_loss: 0.0034, weighted_loss: 0.0014, label: 1, bag_size: 72064\n",
      "batch 139, loss: 0.0040, instance_loss: 0.0054, weighted_loss: 0.0044, label: 1, bag_size: 23445\n",
      "batch 159, loss: 0.0002, instance_loss: 0.0017, weighted_loss: 0.0007, label: 0, bag_size: 58844\n",
      "batch 179, loss: 0.4710, instance_loss: 0.0005, weighted_loss: 0.3298, label: 2, bag_size: 51554\n",
      "batch 199, loss: 0.0180, instance_loss: 0.0021, weighted_loss: 0.0133, label: 1, bag_size: 27666\n",
      "batch 219, loss: 0.0059, instance_loss: 0.0033, weighted_loss: 0.0051, label: 0, bag_size: 20131\n",
      "batch 239, loss: 0.0037, instance_loss: 0.0011, weighted_loss: 0.0029, label: 1, bag_size: 39733\n",
      "batch 259, loss: 0.0003, instance_loss: 0.0032, weighted_loss: 0.0012, label: 2, bag_size: 72966\n",
      "batch 279, loss: 0.0006, instance_loss: 0.0009, weighted_loss: 0.0007, label: 1, bag_size: 57077\n",
      "batch 299, loss: 0.2224, instance_loss: 0.0112, weighted_loss: 0.1591, label: 0, bag_size: 41723\n",
      "batch 319, loss: 0.0389, instance_loss: 0.0018, weighted_loss: 0.0278, label: 0, bag_size: 11284\n",
      "batch 339, loss: 0.0003, instance_loss: 0.0013, weighted_loss: 0.0006, label: 1, bag_size: 54122\n",
      "batch 359, loss: 0.1331, instance_loss: 0.0021, weighted_loss: 0.0938, label: 0, bag_size: 43682\n",
      "batch 379, loss: 0.0031, instance_loss: 0.0011, weighted_loss: 0.0025, label: 0, bag_size: 46652\n",
      "batch 399, loss: 0.7090, instance_loss: 0.0011, weighted_loss: 0.4966, label: 1, bag_size: 92502\n",
      "batch 419, loss: 0.0006, instance_loss: 0.0045, weighted_loss: 0.0018, label: 2, bag_size: 68187\n",
      "batch 439, loss: 0.0007, instance_loss: 0.0030, weighted_loss: 0.0014, label: 0, bag_size: 103177\n",
      "batch 459, loss: 0.0388, instance_loss: 0.0044, weighted_loss: 0.0285, label: 1, bag_size: 99153\n",
      "batch 479, loss: 0.1011, instance_loss: 0.0055, weighted_loss: 0.0724, label: 2, bag_size: 50978\n",
      "batch 499, loss: 0.0003, instance_loss: 0.0076, weighted_loss: 0.0025, label: 2, bag_size: 50045\n",
      "batch 519, loss: 0.0103, instance_loss: 0.0023, weighted_loss: 0.0079, label: 0, bag_size: 72021\n",
      "batch 539, loss: 0.0461, instance_loss: 0.0021, weighted_loss: 0.0329, label: 0, bag_size: 37492\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 17, train_loss: 0.1446, train_clustering_loss:  0.0064, train_error: 0.0456\n",
      "class 0: acc 0.9293478260869565, correct 171/184\n",
      "class 1: acc 0.9536082474226805, correct 185/194\n",
      "class 2: acc 0.9823529411764705, correct 167/170\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.3590, val_error: 0.0984, auc: 0.9724\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8571428571428571, correct 24/28\n",
      "class 1: acc 0.9523809523809523, correct 20/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 6 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0018, instance_loss: 0.0009, weighted_loss: 0.0015, label: 0, bag_size: 59285\n",
      "batch 39, loss: 2.2116, instance_loss: 0.0027, weighted_loss: 1.5490, label: 1, bag_size: 102214\n",
      "batch 59, loss: 0.0007, instance_loss: 0.0003, weighted_loss: 0.0006, label: 0, bag_size: 66565\n",
      "batch 79, loss: 0.0025, instance_loss: 0.0038, weighted_loss: 0.0029, label: 2, bag_size: 72966\n",
      "batch 99, loss: 0.2184, instance_loss: 0.0066, weighted_loss: 0.1549, label: 1, bag_size: 6649\n",
      "batch 119, loss: 0.8087, instance_loss: 0.0074, weighted_loss: 0.5683, label: 2, bag_size: 58111\n",
      "batch 139, loss: 0.0052, instance_loss: 0.0007, weighted_loss: 0.0039, label: 0, bag_size: 72021\n",
      "batch 159, loss: 0.8005, instance_loss: 0.0045, weighted_loss: 0.5617, label: 0, bag_size: 78533\n",
      "batch 179, loss: 0.0214, instance_loss: 0.0073, weighted_loss: 0.0172, label: 2, bag_size: 78955\n",
      "batch 199, loss: 0.6127, instance_loss: 0.0089, weighted_loss: 0.4316, label: 1, bag_size: 66144\n",
      "batch 219, loss: 0.0049, instance_loss: 0.0017, weighted_loss: 0.0040, label: 2, bag_size: 55626\n",
      "batch 239, loss: 0.0058, instance_loss: 0.0054, weighted_loss: 0.0057, label: 1, bag_size: 44591\n",
      "batch 259, loss: 0.0014, instance_loss: 0.0000, weighted_loss: 0.0010, label: 0, bag_size: 79446\n",
      "batch 279, loss: 0.0134, instance_loss: 0.0032, weighted_loss: 0.0103, label: 2, bag_size: 38471\n",
      "batch 299, loss: 0.0784, instance_loss: 0.0011, weighted_loss: 0.0552, label: 2, bag_size: 54040\n",
      "batch 319, loss: 0.0033, instance_loss: 0.0029, weighted_loss: 0.0032, label: 2, bag_size: 51017\n",
      "batch 339, loss: 0.0005, instance_loss: 0.0023, weighted_loss: 0.0011, label: 0, bag_size: 13926\n",
      "batch 359, loss: 0.0030, instance_loss: 0.0016, weighted_loss: 0.0026, label: 1, bag_size: 35189\n",
      "batch 379, loss: 0.0000, instance_loss: 0.0044, weighted_loss: 0.0013, label: 1, bag_size: 2939\n",
      "batch 399, loss: 0.0011, instance_loss: 0.0051, weighted_loss: 0.0023, label: 2, bag_size: 76037\n",
      "batch 419, loss: 0.0002, instance_loss: 0.0015, weighted_loss: 0.0006, label: 0, bag_size: 36272\n",
      "batch 439, loss: 0.0071, instance_loss: 0.0000, weighted_loss: 0.0050, label: 2, bag_size: 103823\n",
      "batch 459, loss: 0.0090, instance_loss: 0.0028, weighted_loss: 0.0072, label: 1, bag_size: 62739\n",
      "batch 479, loss: 0.0013, instance_loss: 0.0061, weighted_loss: 0.0027, label: 1, bag_size: 51100\n",
      "batch 499, loss: 0.0409, instance_loss: 0.0045, weighted_loss: 0.0300, label: 0, bag_size: 16901\n",
      "batch 519, loss: 0.0000, instance_loss: 0.0008, weighted_loss: 0.0003, label: 0, bag_size: 17071\n",
      "batch 539, loss: 0.0160, instance_loss: 0.0021, weighted_loss: 0.0118, label: 2, bag_size: 87135\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 18, train_loss: 0.0834, train_clustering_loss:  0.0050, train_error: 0.0237\n",
      "class 0: acc 0.9597701149425287, correct 167/174\n",
      "class 1: acc 0.9815950920245399, correct 160/163\n",
      "class 2: acc 0.985781990521327, correct 208/211\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4883, val_error: 0.1311, auc: 0.9628\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 0.9047619047619048, correct 19/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "EarlyStopping counter: 7 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0006, instance_loss: 0.0037, weighted_loss: 0.0016, label: 2, bag_size: 79690\n",
      "batch 39, loss: 0.0024, instance_loss: 0.0022, weighted_loss: 0.0023, label: 1, bag_size: 93996\n",
      "batch 59, loss: 0.0300, instance_loss: 0.0049, weighted_loss: 0.0225, label: 0, bag_size: 36475\n",
      "batch 79, loss: 0.0102, instance_loss: 0.0030, weighted_loss: 0.0080, label: 2, bag_size: 51017\n",
      "batch 99, loss: 0.0123, instance_loss: 0.0016, weighted_loss: 0.0091, label: 0, bag_size: 43244\n",
      "batch 119, loss: 0.0100, instance_loss: 0.0080, weighted_loss: 0.0094, label: 1, bag_size: 44591\n",
      "batch 139, loss: 0.0263, instance_loss: 0.0045, weighted_loss: 0.0197, label: 1, bag_size: 172802\n",
      "batch 159, loss: 2.3309, instance_loss: 0.0023, weighted_loss: 1.6323, label: 2, bag_size: 64675\n",
      "batch 179, loss: 0.2977, instance_loss: 0.0038, weighted_loss: 0.2095, label: 1, bag_size: 91540\n",
      "batch 199, loss: 0.0891, instance_loss: 0.0037, weighted_loss: 0.0635, label: 1, bag_size: 42983\n",
      "batch 219, loss: 0.0188, instance_loss: 0.1074, weighted_loss: 0.0454, label: 2, bag_size: 1329\n",
      "batch 239, loss: 0.0006, instance_loss: 0.0014, weighted_loss: 0.0008, label: 1, bag_size: 35189\n",
      "batch 259, loss: 0.0002, instance_loss: 0.0036, weighted_loss: 0.0012, label: 1, bag_size: 100810\n",
      "batch 279, loss: 0.0039, instance_loss: 0.0016, weighted_loss: 0.0032, label: 2, bag_size: 72686\n",
      "batch 299, loss: 0.0097, instance_loss: 0.0008, weighted_loss: 0.0070, label: 0, bag_size: 49117\n",
      "batch 319, loss: 0.0029, instance_loss: 0.0006, weighted_loss: 0.0022, label: 0, bag_size: 75475\n",
      "batch 339, loss: 1.0512, instance_loss: 0.0025, weighted_loss: 0.7366, label: 0, bag_size: 74863\n",
      "batch 359, loss: 0.0056, instance_loss: 0.0040, weighted_loss: 0.0051, label: 2, bag_size: 38471\n",
      "batch 379, loss: 0.0171, instance_loss: 0.0025, weighted_loss: 0.0127, label: 2, bag_size: 55626\n",
      "batch 399, loss: 0.0129, instance_loss: 0.0036, weighted_loss: 0.0101, label: 0, bag_size: 98468\n",
      "batch 419, loss: 0.3062, instance_loss: 0.0000, weighted_loss: 0.2144, label: 1, bag_size: 96719\n",
      "batch 439, loss: 0.0023, instance_loss: 0.0134, weighted_loss: 0.0056, label: 1, bag_size: 12022\n",
      "batch 459, loss: 0.0188, instance_loss: 0.0031, weighted_loss: 0.0141, label: 2, bag_size: 82484\n",
      "batch 479, loss: 0.0155, instance_loss: 0.0006, weighted_loss: 0.0111, label: 1, bag_size: 54288\n",
      "batch 499, loss: 0.2562, instance_loss: 0.0016, weighted_loss: 0.1798, label: 1, bag_size: 112356\n",
      "batch 519, loss: 0.0662, instance_loss: 0.0027, weighted_loss: 0.0472, label: 1, bag_size: 52176\n",
      "batch 539, loss: 0.0002, instance_loss: 0.0034, weighted_loss: 0.0011, label: 2, bag_size: 30615\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 19, train_loss: 0.1155, train_clustering_loss:  0.0063, train_error: 0.0383\n",
      "class 0: acc 0.9479768786127167, correct 164/173\n",
      "class 1: acc 0.9456521739130435, correct 174/184\n",
      "class 2: acc 0.9895287958115183, correct 189/191\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4403, val_error: 0.0984, auc: 0.9753\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8214285714285714, correct 23/28\n",
      "class 1: acc 1.0, correct 21/21\n",
      "class 2: acc 0.9166666666666666, correct 11/12\n",
      "Validation loss decreased (0.975783 --> 0.975340).  Saving model ...\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9523809523809524\n",
      "0.9821428571428572\n",
      "0.9914965986394558\n",
      "Val error: 0.0984, ROC AUC: 0.9753\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0.9863414634146341\n",
      "0.9794215795328143\n",
      "0.9978902953586498\n",
      "Test error: 0.0549, ROC AUC: 0.9879\n",
      "class 0: acc 0.92, correct 46/50\n",
      "class 1: acc 0.9655172413793104, correct 28/29\n",
      "class 2: acc 1.0, correct 12/12\n",
      "   Unnamed: 0       case_id  \\\n",
      "0           0  TCGA-4A-A93X   \n",
      "1           1  TCGA-B3-4104   \n",
      "2           2  TCGA-BP-4963   \n",
      "3           3  TCGA-BP-5170   \n",
      "4           4  TCGA-BP-5175   \n",
      "\n",
      "                                            slide_id oncotree_code site   age  \\\n",
      "0  TCGA-4A-A93X-01Z-00-DX2.45011BF1-FED8-4D22-B5E...          PRCC   4A  58.0   \n",
      "1  TCGA-B3-4104-01Z-00-DX1.0783e269-2e8a-4f32-b91...          PRCC   B3  75.0   \n",
      "2  TCGA-BP-4963-01Z-00-DX1.7e206961-5271-40d3-a96...         CCRCC   BP  63.0   \n",
      "3  TCGA-BP-5170-01Z-00-DX1.ae43bef7-3d81-4f69-be3...         CCRCC   BP  55.0   \n",
      "4  TCGA-BP-5175-01Z-00-DX1.e954ae94-307c-475e-9f6...         CCRCC   BP  60.0   \n",
      "\n",
      "   survival_months  is_female  censorship race label  \n",
      "0            12.81        0.0         1.0    W     1  \n",
      "1            34.46        0.0         1.0    W     1  \n",
      "2            60.25        0.0         1.0    W     0  \n",
      "3            79.24        0.0         1.0    W     0  \n",
      "4            30.62        0.0         1.0    W     0  \n",
      "Traing Data Size ({1.00}): 548 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 61 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "Traing Data Size ({1.00}): 91 ./RCC/FEATURES_DIRECTORY_beph/pt_files/\n",
      "Cluster file missing\n",
      "\n",
      "Training Fold 4!\n",
      "\n",
      "Init train/val/test splits... \n",
      "Done!\n",
      "Training on 548 samples\n",
      "Validating on 61 samples\n",
      "Testing on 91 samples\n",
      "\n",
      "Init loss function... Done!\n",
      "\n",
      "Init Model... Setting tau to 1.0\n",
      "Done!\n",
      "CLAM_SB(\n",
      "  (attention_net): Sequential(\n",
      "    (0): Linear(in_features=384, out_features=384, bias=True)\n",
      "    (1): ReLU()\n",
      "    (2): Dropout(p=0.25, inplace=False)\n",
      "    (3): Attn_Net_Gated(\n",
      "      (attention_a): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Tanh()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_b): Sequential(\n",
      "        (0): Linear(in_features=384, out_features=256, bias=True)\n",
      "        (1): Sigmoid()\n",
      "        (2): Dropout(p=0.25, inplace=False)\n",
      "      )\n",
      "      (attention_c): Linear(in_features=256, out_features=1, bias=True)\n",
      "    )\n",
      "  )\n",
      "  (feature_linear1): Linear(in_features=768, out_features=384, bias=True)\n",
      "  (classifiers): Linear(in_features=384, out_features=3, bias=True)\n",
      "  (instance_classifiers): ModuleList(\n",
      "    (0): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (1): Linear(in_features=384, out_features=2, bias=True)\n",
      "    (2): Linear(in_features=384, out_features=2, bias=True)\n",
      "  )\n",
      "  (instance_loss_fn): SmoothTop1SVM()\n",
      ")\n",
      "Total number of parameters: 643978\n",
      "Total number of trainable parameters: 643978\n",
      "\n",
      "Init optimizer ... Done!\n",
      "\n",
      "Init Loaders... Done!\n",
      "\n",
      "Setup EarlyStopping... Done!\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.7897, instance_loss: 1.5369, weighted_loss: 1.7138, label: 2, bag_size: 15486\n",
      "batch 39, loss: 1.0980, instance_loss: 0.8427, weighted_loss: 1.0214, label: 2, bag_size: 36978\n",
      "batch 59, loss: 0.3765, instance_loss: 1.5448, weighted_loss: 0.7270, label: 1, bag_size: 23445\n",
      "batch 79, loss: 2.6482, instance_loss: 0.4833, weighted_loss: 1.9987, label: 0, bag_size: 72970\n",
      "batch 99, loss: 1.3923, instance_loss: 0.4285, weighted_loss: 1.1031, label: 1, bag_size: 74256\n",
      "batch 119, loss: 0.2657, instance_loss: 0.2215, weighted_loss: 0.2524, label: 2, bag_size: 59810\n",
      "batch 139, loss: 0.6749, instance_loss: 0.4002, weighted_loss: 0.5925, label: 0, bag_size: 34548\n",
      "batch 159, loss: 1.9725, instance_loss: 0.3197, weighted_loss: 1.4767, label: 2, bag_size: 52584\n",
      "batch 179, loss: 0.2777, instance_loss: 0.7738, weighted_loss: 0.4266, label: 1, bag_size: 47660\n",
      "batch 199, loss: 1.6169, instance_loss: 0.0300, weighted_loss: 1.1409, label: 1, bag_size: 114116\n",
      "batch 219, loss: 0.3215, instance_loss: 0.2720, weighted_loss: 0.3066, label: 0, bag_size: 11284\n",
      "batch 239, loss: 1.1286, instance_loss: 0.1708, weighted_loss: 0.8412, label: 1, bag_size: 5928\n",
      "batch 259, loss: 0.0446, instance_loss: 0.0622, weighted_loss: 0.0499, label: 1, bag_size: 20374\n",
      "batch 279, loss: 0.1827, instance_loss: 0.0130, weighted_loss: 0.1318, label: 1, bag_size: 81168\n",
      "batch 299, loss: 0.4187, instance_loss: 0.0298, weighted_loss: 0.3021, label: 2, bag_size: 54265\n",
      "batch 319, loss: 0.3411, instance_loss: 0.1988, weighted_loss: 0.2984, label: 2, bag_size: 39234\n",
      "batch 339, loss: 0.1273, instance_loss: 0.2839, weighted_loss: 0.1743, label: 2, bag_size: 66345\n",
      "batch 359, loss: 0.1343, instance_loss: 0.0990, weighted_loss: 0.1237, label: 0, bag_size: 65592\n",
      "batch 379, loss: 0.6599, instance_loss: 0.0049, weighted_loss: 0.4634, label: 1, bag_size: 27666\n",
      "batch 399, loss: 0.0269, instance_loss: 0.0148, weighted_loss: 0.0232, label: 0, bag_size: 61195\n",
      "batch 419, loss: 0.6858, instance_loss: 0.0217, weighted_loss: 0.4866, label: 1, bag_size: 80573\n",
      "batch 439, loss: 0.0201, instance_loss: 0.0143, weighted_loss: 0.0183, label: 0, bag_size: 35639\n",
      "batch 459, loss: 0.0619, instance_loss: 0.0028, weighted_loss: 0.0442, label: 0, bag_size: 87979\n",
      "batch 479, loss: 2.2025, instance_loss: 0.0055, weighted_loss: 1.5434, label: 1, bag_size: 45064\n",
      "batch 499, loss: 0.3628, instance_loss: 0.0174, weighted_loss: 0.2592, label: 2, bag_size: 15486\n",
      "batch 519, loss: 0.7217, instance_loss: 0.0137, weighted_loss: 0.5093, label: 1, bag_size: 39770\n",
      "batch 539, loss: 0.4380, instance_loss: 0.0195, weighted_loss: 0.3125, label: 2, bag_size: 54040\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9546076642335767: correct 4185/4384\n",
      "class 1 clustering acc 0.9071624087591241: correct 3977/4384\n",
      "Epoch: 0, train_loss: 0.6595, train_clustering_loss:  0.2912, train_error: 0.2719\n",
      "class 0: acc 0.712707182320442, correct 129/181\n",
      "class 1: acc 0.7368421052631579, correct 140/190\n",
      "class 2: acc 0.7344632768361582, correct 130/177\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4996, val_error: 0.1639, auc: 0.9458\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9979508196721312: correct 487/488\n",
      "class 0: acc 0.8888888888888888, correct 32/36\n",
      "class 1: acc 0.6666666666666666, correct 12/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (inf --> 0.945751).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0326, instance_loss: 0.0151, weighted_loss: 0.0273, label: 0, bag_size: 39895\n",
      "batch 39, loss: 0.3672, instance_loss: 0.0117, weighted_loss: 0.2606, label: 1, bag_size: 79045\n",
      "batch 59, loss: 0.0408, instance_loss: 0.0289, weighted_loss: 0.0373, label: 1, bag_size: 13709\n",
      "batch 79, loss: 0.1173, instance_loss: 0.0680, weighted_loss: 0.1025, label: 2, bag_size: 12043\n",
      "batch 99, loss: 0.0506, instance_loss: 0.0052, weighted_loss: 0.0370, label: 0, bag_size: 59285\n",
      "batch 119, loss: 0.2470, instance_loss: 0.0082, weighted_loss: 0.1754, label: 2, bag_size: 73189\n",
      "batch 139, loss: 0.0160, instance_loss: 0.0110, weighted_loss: 0.0145, label: 0, bag_size: 93189\n",
      "batch 159, loss: 0.0032, instance_loss: 0.0033, weighted_loss: 0.0032, label: 0, bag_size: 78844\n",
      "batch 179, loss: 0.0195, instance_loss: 0.0593, weighted_loss: 0.0315, label: 2, bag_size: 38471\n",
      "batch 199, loss: 0.0446, instance_loss: 0.0021, weighted_loss: 0.0318, label: 0, bag_size: 90654\n",
      "batch 219, loss: 0.5879, instance_loss: 0.0084, weighted_loss: 0.4141, label: 0, bag_size: 114320\n",
      "batch 239, loss: 0.0751, instance_loss: 0.0314, weighted_loss: 0.0620, label: 2, bag_size: 30615\n",
      "batch 259, loss: 0.0379, instance_loss: 0.0280, weighted_loss: 0.0350, label: 1, bag_size: 70847\n",
      "batch 279, loss: 0.0392, instance_loss: 0.0141, weighted_loss: 0.0317, label: 1, bag_size: 28713\n",
      "batch 299, loss: 0.0028, instance_loss: 0.0161, weighted_loss: 0.0068, label: 0, bag_size: 17071\n",
      "batch 319, loss: 3.1120, instance_loss: 0.0158, weighted_loss: 2.1832, label: 0, bag_size: 21504\n",
      "batch 339, loss: 0.2463, instance_loss: 0.0065, weighted_loss: 0.1744, label: 2, bag_size: 52584\n",
      "batch 359, loss: 0.0370, instance_loss: 0.0283, weighted_loss: 0.0344, label: 2, bag_size: 48881\n",
      "batch 379, loss: 0.8079, instance_loss: 0.0067, weighted_loss: 0.5675, label: 1, bag_size: 43142\n",
      "batch 399, loss: 0.0334, instance_loss: 0.0244, weighted_loss: 0.0307, label: 1, bag_size: 20374\n",
      "batch 419, loss: 0.0370, instance_loss: 0.0020, weighted_loss: 0.0265, label: 2, bag_size: 40315\n",
      "batch 439, loss: 0.1218, instance_loss: 0.0116, weighted_loss: 0.0888, label: 0, bag_size: 25938\n",
      "batch 459, loss: 0.3223, instance_loss: 0.0021, weighted_loss: 0.2263, label: 2, bag_size: 69002\n",
      "batch 479, loss: 0.8009, instance_loss: 0.0022, weighted_loss: 0.5613, label: 2, bag_size: 11415\n",
      "batch 499, loss: 1.7258, instance_loss: 0.0052, weighted_loss: 1.2096, label: 0, bag_size: 74863\n",
      "batch 519, loss: 0.0508, instance_loss: 0.0592, weighted_loss: 0.0533, label: 2, bag_size: 16676\n",
      "batch 539, loss: 0.0274, instance_loss: 0.0008, weighted_loss: 0.0194, label: 2, bag_size: 54265\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9963503649635036: correct 4368/4384\n",
      "class 1 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "Epoch: 1, train_loss: 0.4100, train_clustering_loss:  0.0257, train_error: 0.1460\n",
      "class 0: acc 0.8212290502793296, correct 147/179\n",
      "class 1: acc 0.8296703296703297, correct 151/182\n",
      "class 2: acc 0.9090909090909091, correct 170/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5126, val_error: 0.1639, auc: 0.9506\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8611111111111112, correct 31/36\n",
      "class 1: acc 0.7222222222222222, correct 13/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.945751 --> 0.950619).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0112, instance_loss: 0.0008, weighted_loss: 0.0081, label: 1, bag_size: 54122\n",
      "batch 39, loss: 1.2607, instance_loss: 0.0095, weighted_loss: 0.8853, label: 0, bag_size: 72204\n",
      "batch 59, loss: 0.2834, instance_loss: 0.0004, weighted_loss: 0.1985, label: 2, bag_size: 52924\n",
      "batch 79, loss: 0.0013, instance_loss: 0.0107, weighted_loss: 0.0041, label: 0, bag_size: 18929\n",
      "batch 99, loss: 0.5138, instance_loss: 0.0207, weighted_loss: 0.3658, label: 2, bag_size: 15486\n",
      "batch 119, loss: 0.0049, instance_loss: 0.0015, weighted_loss: 0.0039, label: 0, bag_size: 73829\n",
      "batch 139, loss: 0.0265, instance_loss: 0.0019, weighted_loss: 0.0191, label: 1, bag_size: 28349\n",
      "batch 159, loss: 0.0717, instance_loss: 0.0047, weighted_loss: 0.0516, label: 2, bag_size: 59224\n",
      "batch 179, loss: 0.0920, instance_loss: 0.0136, weighted_loss: 0.0685, label: 2, bag_size: 54040\n",
      "batch 199, loss: 0.0539, instance_loss: 0.0233, weighted_loss: 0.0447, label: 2, bag_size: 39234\n",
      "batch 219, loss: 0.1498, instance_loss: 0.0133, weighted_loss: 0.1088, label: 0, bag_size: 65592\n",
      "batch 239, loss: 0.2741, instance_loss: 0.0083, weighted_loss: 0.1944, label: 2, bag_size: 64675\n",
      "batch 259, loss: 0.0175, instance_loss: 0.0021, weighted_loss: 0.0129, label: 0, bag_size: 64555\n",
      "batch 279, loss: 0.2960, instance_loss: 0.0134, weighted_loss: 0.2112, label: 2, bag_size: 51554\n",
      "batch 299, loss: 0.0239, instance_loss: 0.0104, weighted_loss: 0.0198, label: 0, bag_size: 55374\n",
      "batch 319, loss: 0.2743, instance_loss: 0.0052, weighted_loss: 0.1935, label: 2, bag_size: 87135\n",
      "batch 339, loss: 0.2139, instance_loss: 0.0059, weighted_loss: 0.1515, label: 0, bag_size: 18166\n",
      "batch 359, loss: 0.0547, instance_loss: 0.0010, weighted_loss: 0.0386, label: 2, bag_size: 66023\n",
      "batch 379, loss: 0.5194, instance_loss: 0.0037, weighted_loss: 0.3647, label: 1, bag_size: 109684\n",
      "batch 399, loss: 0.0048, instance_loss: 0.0349, weighted_loss: 0.0138, label: 2, bag_size: 51251\n",
      "batch 419, loss: 0.0163, instance_loss: 0.0168, weighted_loss: 0.0164, label: 1, bag_size: 113716\n",
      "batch 439, loss: 0.0004, instance_loss: 0.0038, weighted_loss: 0.0014, label: 0, bag_size: 45846\n",
      "batch 459, loss: 0.0024, instance_loss: 0.0044, weighted_loss: 0.0030, label: 0, bag_size: 55538\n",
      "batch 479, loss: 0.5996, instance_loss: 0.0009, weighted_loss: 0.4200, label: 1, bag_size: 60865\n",
      "batch 499, loss: 0.0394, instance_loss: 0.0151, weighted_loss: 0.0322, label: 2, bag_size: 66345\n",
      "batch 519, loss: 0.0158, instance_loss: 0.0629, weighted_loss: 0.0300, label: 2, bag_size: 38471\n",
      "batch 539, loss: 0.0644, instance_loss: 0.0065, weighted_loss: 0.0470, label: 1, bag_size: 51221\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "class 1 clustering acc 0.9984032846715328: correct 4377/4384\n",
      "Epoch: 2, train_loss: 0.3099, train_clustering_loss:  0.0184, train_error: 0.1077\n",
      "class 0: acc 0.8700564971751412, correct 154/177\n",
      "class 1: acc 0.88, correct 176/200\n",
      "class 2: acc 0.9298245614035088, correct 159/171\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6205, val_error: 0.1967, auc: 0.9549\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9959016393442623: correct 486/488\n",
      "class 0: acc 0.75, correct 27/36\n",
      "class 1: acc 0.8333333333333334, correct 15/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.950619 --> 0.954864).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0345, instance_loss: 0.0036, weighted_loss: 0.0252, label: 2, bag_size: 72686\n",
      "batch 39, loss: 0.1072, instance_loss: 0.1439, weighted_loss: 0.1182, label: 2, bag_size: 68302\n",
      "batch 59, loss: 0.0054, instance_loss: 0.0041, weighted_loss: 0.0050, label: 1, bag_size: 49463\n",
      "batch 79, loss: 0.0641, instance_loss: 0.0007, weighted_loss: 0.0451, label: 1, bag_size: 62303\n",
      "batch 99, loss: 0.0348, instance_loss: 0.0297, weighted_loss: 0.0333, label: 1, bag_size: 14553\n",
      "batch 119, loss: 0.0110, instance_loss: 0.0019, weighted_loss: 0.0083, label: 0, bag_size: 54937\n",
      "batch 139, loss: 1.2872, instance_loss: 0.2397, weighted_loss: 0.9730, label: 0, bag_size: 4963\n",
      "batch 159, loss: 0.0061, instance_loss: 0.0035, weighted_loss: 0.0053, label: 1, bag_size: 45270\n",
      "batch 179, loss: 0.0082, instance_loss: 0.0256, weighted_loss: 0.0134, label: 2, bag_size: 46661\n",
      "batch 199, loss: 0.5973, instance_loss: 0.0009, weighted_loss: 0.4184, label: 1, bag_size: 75744\n",
      "batch 219, loss: 0.0476, instance_loss: 0.1527, weighted_loss: 0.0792, label: 2, bag_size: 75833\n",
      "batch 239, loss: 0.5280, instance_loss: 0.0006, weighted_loss: 0.3698, label: 0, bag_size: 69588\n",
      "batch 259, loss: 0.1335, instance_loss: 0.0813, weighted_loss: 0.1178, label: 2, bag_size: 39234\n",
      "batch 279, loss: 0.2025, instance_loss: 0.0060, weighted_loss: 0.1435, label: 2, bag_size: 68302\n",
      "batch 299, loss: 0.5961, instance_loss: 0.0086, weighted_loss: 0.4199, label: 1, bag_size: 45812\n",
      "batch 319, loss: 0.1136, instance_loss: 0.0104, weighted_loss: 0.0826, label: 0, bag_size: 53181\n",
      "batch 339, loss: 0.0447, instance_loss: 0.0118, weighted_loss: 0.0348, label: 2, bag_size: 51316\n",
      "batch 359, loss: 0.2273, instance_loss: 0.0011, weighted_loss: 0.1595, label: 1, bag_size: 50027\n",
      "batch 379, loss: 0.0054, instance_loss: 0.0184, weighted_loss: 0.0093, label: 2, bag_size: 46661\n",
      "batch 399, loss: 0.1320, instance_loss: 0.1980, weighted_loss: 0.1518, label: 2, bag_size: 71763\n",
      "batch 419, loss: 0.3282, instance_loss: 0.0020, weighted_loss: 0.2304, label: 2, bag_size: 52924\n",
      "batch 439, loss: 1.5035, instance_loss: 0.0020, weighted_loss: 1.0530, label: 2, bag_size: 23841\n",
      "batch 459, loss: 0.0184, instance_loss: 0.0191, weighted_loss: 0.0186, label: 1, bag_size: 74927\n",
      "batch 479, loss: 1.6056, instance_loss: 0.0117, weighted_loss: 1.1274, label: 2, bag_size: 28252\n",
      "batch 499, loss: 0.0767, instance_loss: 0.0103, weighted_loss: 0.0568, label: 0, bag_size: 74183\n",
      "batch 519, loss: 0.1643, instance_loss: 0.0303, weighted_loss: 0.1241, label: 1, bag_size: 42997\n",
      "batch 539, loss: 0.2188, instance_loss: 0.0084, weighted_loss: 0.1557, label: 2, bag_size: 65317\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "class 1 clustering acc 0.9965784671532847: correct 4369/4384\n",
      "Epoch: 3, train_loss: 0.2941, train_clustering_loss:  0.0238, train_error: 0.1131\n",
      "class 0: acc 0.8719211822660099, correct 177/203\n",
      "class 1: acc 0.8430232558139535, correct 145/172\n",
      "class 2: acc 0.9479768786127167, correct 164/173\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5005, val_error: 0.1639, auc: 0.9552\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 0.9938524590163934: correct 485/488\n",
      "class 0: acc 0.7777777777777778, correct 28/36\n",
      "class 1: acc 0.8888888888888888, correct 16/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.954864 --> 0.955239).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.3570, instance_loss: 0.0048, weighted_loss: 0.2514, label: 2, bag_size: 50246\n",
      "batch 39, loss: 0.0204, instance_loss: 0.0341, weighted_loss: 0.0245, label: 2, bag_size: 79690\n",
      "batch 59, loss: 0.0035, instance_loss: 0.0105, weighted_loss: 0.0056, label: 2, bag_size: 51251\n",
      "batch 79, loss: 0.2435, instance_loss: 0.0012, weighted_loss: 0.1708, label: 2, bag_size: 49458\n",
      "batch 99, loss: 0.4750, instance_loss: 0.0027, weighted_loss: 0.3333, label: 2, bag_size: 52584\n",
      "batch 119, loss: 0.1247, instance_loss: 0.0075, weighted_loss: 0.0895, label: 2, bag_size: 71567\n",
      "batch 139, loss: 0.0290, instance_loss: 0.0776, weighted_loss: 0.0436, label: 0, bag_size: 19587\n",
      "batch 159, loss: 0.0634, instance_loss: 0.0015, weighted_loss: 0.0448, label: 2, bag_size: 65340\n",
      "batch 179, loss: 0.0881, instance_loss: 0.0003, weighted_loss: 0.0617, label: 2, bag_size: 87135\n",
      "batch 199, loss: 0.0622, instance_loss: 0.0088, weighted_loss: 0.0462, label: 2, bag_size: 10243\n",
      "batch 219, loss: 0.0118, instance_loss: 0.0010, weighted_loss: 0.0085, label: 0, bag_size: 47784\n",
      "batch 239, loss: 0.0516, instance_loss: 0.0033, weighted_loss: 0.0371, label: 2, bag_size: 87135\n",
      "batch 259, loss: 0.0453, instance_loss: 0.0041, weighted_loss: 0.0329, label: 0, bag_size: 23076\n",
      "batch 279, loss: 0.4516, instance_loss: 0.0004, weighted_loss: 0.3162, label: 1, bag_size: 112934\n",
      "batch 299, loss: 0.0429, instance_loss: 0.0564, weighted_loss: 0.0470, label: 2, bag_size: 16676\n",
      "batch 319, loss: 0.5278, instance_loss: 0.0067, weighted_loss: 0.3714, label: 0, bag_size: 22685\n",
      "batch 339, loss: 0.0553, instance_loss: 0.0015, weighted_loss: 0.0391, label: 0, bag_size: 49117\n",
      "batch 359, loss: 0.0048, instance_loss: 0.0591, weighted_loss: 0.0211, label: 1, bag_size: 44760\n",
      "batch 379, loss: 0.0219, instance_loss: 0.0017, weighted_loss: 0.0158, label: 2, bag_size: 49458\n",
      "batch 399, loss: 0.0077, instance_loss: 0.0041, weighted_loss: 0.0066, label: 1, bag_size: 12138\n",
      "batch 419, loss: 0.1480, instance_loss: 0.0052, weighted_loss: 0.1051, label: 1, bag_size: 91540\n",
      "batch 439, loss: 0.0031, instance_loss: 0.0015, weighted_loss: 0.0027, label: 0, bag_size: 72078\n",
      "batch 459, loss: 1.2993, instance_loss: 0.0003, weighted_loss: 0.9096, label: 1, bag_size: 67478\n",
      "batch 479, loss: 1.2817, instance_loss: 0.0089, weighted_loss: 0.8999, label: 2, bag_size: 64675\n",
      "batch 499, loss: 0.0005, instance_loss: 0.0083, weighted_loss: 0.0028, label: 2, bag_size: 51251\n",
      "batch 519, loss: 0.2200, instance_loss: 0.0027, weighted_loss: 0.1548, label: 0, bag_size: 93784\n",
      "batch 539, loss: 0.2873, instance_loss: 0.0071, weighted_loss: 0.2032, label: 2, bag_size: 50246\n",
      "\n",
      "\n",
      "class 0 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "class 1 clustering acc 0.9977189781021898: correct 4374/4384\n",
      "Epoch: 4, train_loss: 0.2516, train_clustering_loss:  0.0124, train_error: 0.0821\n",
      "class 0: acc 0.9285714285714286, correct 182/196\n",
      "class 1: acc 0.8907103825136612, correct 163/183\n",
      "class 2: acc 0.9349112426035503, correct 158/169\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.6775, val_error: 0.1803, auc: 0.9558\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.7222222222222222, correct 26/36\n",
      "class 1: acc 0.9444444444444444, correct 17/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.955239 --> 0.955798).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0047, instance_loss: 0.0101, weighted_loss: 0.0063, label: 0, bag_size: 67238\n",
      "batch 39, loss: 0.0531, instance_loss: 0.0010, weighted_loss: 0.0375, label: 2, bag_size: 51554\n",
      "batch 59, loss: 0.0016, instance_loss: 0.0004, weighted_loss: 0.0012, label: 0, bag_size: 67411\n",
      "batch 79, loss: 0.0431, instance_loss: 0.0005, weighted_loss: 0.0303, label: 1, bag_size: 8400\n",
      "batch 99, loss: 0.0262, instance_loss: 0.0022, weighted_loss: 0.0190, label: 1, bag_size: 46625\n",
      "batch 119, loss: 0.0482, instance_loss: 0.0104, weighted_loss: 0.0369, label: 2, bag_size: 12043\n",
      "batch 139, loss: 0.0019, instance_loss: 0.0000, weighted_loss: 0.0013, label: 1, bag_size: 55270\n",
      "batch 159, loss: 0.0488, instance_loss: 0.0045, weighted_loss: 0.0355, label: 2, bag_size: 47138\n",
      "batch 179, loss: 0.5231, instance_loss: 0.0021, weighted_loss: 0.3668, label: 1, bag_size: 112356\n",
      "batch 199, loss: 0.5806, instance_loss: 0.0008, weighted_loss: 0.4067, label: 1, bag_size: 92502\n",
      "batch 219, loss: 0.0110, instance_loss: 0.0015, weighted_loss: 0.0081, label: 1, bag_size: 8400\n",
      "batch 239, loss: 0.7610, instance_loss: 0.0040, weighted_loss: 0.5339, label: 0, bag_size: 72204\n",
      "batch 259, loss: 0.1393, instance_loss: 0.0000, weighted_loss: 0.0975, label: 0, bag_size: 90911\n",
      "batch 279, loss: 0.8472, instance_loss: 0.0032, weighted_loss: 0.5940, label: 1, bag_size: 49727\n",
      "batch 299, loss: 0.0129, instance_loss: 0.0007, weighted_loss: 0.0093, label: 2, bag_size: 72015\n",
      "batch 319, loss: 0.2711, instance_loss: 0.0021, weighted_loss: 0.1904, label: 1, bag_size: 39770\n",
      "batch 339, loss: 0.0105, instance_loss: 0.0181, weighted_loss: 0.0128, label: 2, bag_size: 82484\n",
      "batch 359, loss: 0.1300, instance_loss: 0.0021, weighted_loss: 0.0916, label: 2, bag_size: 87135\n",
      "batch 379, loss: 0.0934, instance_loss: 0.0094, weighted_loss: 0.0682, label: 2, bag_size: 52584\n",
      "batch 399, loss: 0.0156, instance_loss: 0.0013, weighted_loss: 0.0114, label: 2, bag_size: 59810\n",
      "batch 419, loss: 0.9141, instance_loss: 0.0050, weighted_loss: 0.6413, label: 0, bag_size: 63845\n",
      "batch 439, loss: 0.0170, instance_loss: 0.0038, weighted_loss: 0.0130, label: 0, bag_size: 80173\n",
      "batch 459, loss: 0.0878, instance_loss: 0.0076, weighted_loss: 0.0638, label: 0, bag_size: 61703\n",
      "batch 479, loss: 0.0528, instance_loss: 0.0018, weighted_loss: 0.0375, label: 1, bag_size: 8400\n",
      "batch 499, loss: 0.0059, instance_loss: 0.0024, weighted_loss: 0.0048, label: 1, bag_size: 100810\n",
      "batch 519, loss: 0.1364, instance_loss: 0.0015, weighted_loss: 0.0959, label: 2, bag_size: 34408\n",
      "batch 539, loss: 0.0072, instance_loss: 0.0146, weighted_loss: 0.0094, label: 2, bag_size: 39234\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 5, train_loss: 0.2164, train_clustering_loss:  0.0074, train_error: 0.0712\n",
      "class 0: acc 0.9, correct 144/160\n",
      "class 1: acc 0.9180327868852459, correct 168/183\n",
      "class 2: acc 0.9609756097560975, correct 197/205\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.5711, val_error: 0.1639, auc: 0.9647\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8055555555555556, correct 29/36\n",
      "class 1: acc 0.8333333333333334, correct 15/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.955798 --> 0.964702).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0031, instance_loss: 0.0064, weighted_loss: 0.0041, label: 1, bag_size: 74870\n",
      "batch 39, loss: 0.0428, instance_loss: 0.0031, weighted_loss: 0.0309, label: 2, bag_size: 68302\n",
      "batch 59, loss: 0.0107, instance_loss: 0.0063, weighted_loss: 0.0094, label: 2, bag_size: 59810\n",
      "batch 79, loss: 0.0205, instance_loss: 0.0008, weighted_loss: 0.0146, label: 0, bag_size: 9321\n",
      "batch 99, loss: 0.1305, instance_loss: 0.0123, weighted_loss: 0.0950, label: 0, bag_size: 49951\n",
      "batch 119, loss: 0.0115, instance_loss: 0.0024, weighted_loss: 0.0088, label: 1, bag_size: 56317\n",
      "batch 139, loss: 0.5799, instance_loss: 0.0027, weighted_loss: 0.4067, label: 0, bag_size: 66882\n",
      "batch 159, loss: 0.0203, instance_loss: 0.0005, weighted_loss: 0.0144, label: 2, bag_size: 47138\n",
      "batch 179, loss: 0.0121, instance_loss: 0.0000, weighted_loss: 0.0085, label: 2, bag_size: 103823\n",
      "batch 199, loss: 0.0003, instance_loss: 0.0005, weighted_loss: 0.0003, label: 0, bag_size: 77221\n",
      "batch 219, loss: 0.0072, instance_loss: 0.0053, weighted_loss: 0.0066, label: 2, bag_size: 36978\n",
      "batch 239, loss: 0.0056, instance_loss: 0.0017, weighted_loss: 0.0045, label: 1, bag_size: 62739\n",
      "batch 259, loss: 0.0081, instance_loss: 0.0114, weighted_loss: 0.0091, label: 2, bag_size: 75833\n",
      "batch 279, loss: 0.0069, instance_loss: 0.0006, weighted_loss: 0.0050, label: 1, bag_size: 62126\n",
      "batch 299, loss: 0.0003, instance_loss: 0.0012, weighted_loss: 0.0006, label: 1, bag_size: 74454\n",
      "batch 319, loss: 0.8345, instance_loss: 0.0147, weighted_loss: 0.5885, label: 0, bag_size: 76865\n",
      "batch 339, loss: 0.0240, instance_loss: 0.0108, weighted_loss: 0.0200, label: 2, bag_size: 36978\n",
      "batch 359, loss: 0.0286, instance_loss: 0.0008, weighted_loss: 0.0202, label: 0, bag_size: 72021\n",
      "batch 379, loss: 0.0092, instance_loss: 0.0097, weighted_loss: 0.0093, label: 2, bag_size: 73189\n",
      "batch 399, loss: 0.0248, instance_loss: 0.0010, weighted_loss: 0.0176, label: 2, bag_size: 66801\n",
      "batch 419, loss: 0.0003, instance_loss: 0.0041, weighted_loss: 0.0014, label: 1, bag_size: 2939\n",
      "batch 439, loss: 0.0029, instance_loss: 0.0013, weighted_loss: 0.0024, label: 0, bag_size: 98555\n",
      "batch 459, loss: 0.0024, instance_loss: 0.0002, weighted_loss: 0.0017, label: 0, bag_size: 55538\n",
      "batch 479, loss: 0.7684, instance_loss: 0.0016, weighted_loss: 0.5384, label: 2, bag_size: 51554\n",
      "batch 499, loss: 0.8636, instance_loss: 0.0176, weighted_loss: 0.6098, label: 1, bag_size: 86619\n",
      "batch 519, loss: 0.0970, instance_loss: 0.0060, weighted_loss: 0.0697, label: 2, bag_size: 56310\n",
      "batch 539, loss: 0.0154, instance_loss: 0.0013, weighted_loss: 0.0111, label: 2, bag_size: 49458\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.999771897810219: correct 4383/4384\n",
      "Epoch: 6, train_loss: 0.1996, train_clustering_loss:  0.0075, train_error: 0.0766\n",
      "class 0: acc 0.8938547486033519, correct 160/179\n",
      "class 1: acc 0.8901098901098901, correct 162/182\n",
      "class 2: acc 0.983957219251337, correct 184/187\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4302, val_error: 0.1311, auc: 0.9680\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8333333333333334, correct 30/36\n",
      "class 1: acc 0.9444444444444444, correct 17/18\n",
      "class 2: acc 0.8571428571428571, correct 6/7\n",
      "Validation loss decreased (0.964702 --> 0.967967).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0014, instance_loss: 0.0025, weighted_loss: 0.0018, label: 1, bag_size: 53211\n",
      "batch 39, loss: 0.0045, instance_loss: 0.0280, weighted_loss: 0.0115, label: 2, bag_size: 30615\n",
      "batch 59, loss: 0.0019, instance_loss: 0.0070, weighted_loss: 0.0034, label: 1, bag_size: 55681\n",
      "batch 79, loss: 0.9805, instance_loss: 0.0000, weighted_loss: 0.6864, label: 1, bag_size: 109684\n",
      "batch 99, loss: 0.0003, instance_loss: 0.0042, weighted_loss: 0.0015, label: 2, bag_size: 68187\n",
      "batch 119, loss: 0.0069, instance_loss: 0.0021, weighted_loss: 0.0055, label: 0, bag_size: 57597\n",
      "batch 139, loss: 0.8083, instance_loss: 0.0116, weighted_loss: 0.5692, label: 0, bag_size: 35176\n",
      "batch 159, loss: 0.9972, instance_loss: 0.0139, weighted_loss: 0.7022, label: 2, bag_size: 58111\n",
      "batch 179, loss: 0.1312, instance_loss: 0.0031, weighted_loss: 0.0928, label: 0, bag_size: 70900\n",
      "batch 199, loss: 0.4919, instance_loss: 0.0023, weighted_loss: 0.3450, label: 0, bag_size: 87503\n",
      "batch 219, loss: 0.0130, instance_loss: 0.0487, weighted_loss: 0.0237, label: 2, bag_size: 46661\n",
      "batch 239, loss: 0.0006, instance_loss: 0.0037, weighted_loss: 0.0015, label: 1, bag_size: 50525\n",
      "batch 259, loss: 1.4112, instance_loss: 0.0079, weighted_loss: 0.9902, label: 0, bag_size: 21504\n",
      "batch 279, loss: 0.0029, instance_loss: 0.0048, weighted_loss: 0.0034, label: 1, bag_size: 41704\n",
      "batch 299, loss: 0.2368, instance_loss: 0.0034, weighted_loss: 0.1668, label: 2, bag_size: 23841\n",
      "batch 319, loss: 0.0124, instance_loss: 0.0030, weighted_loss: 0.0096, label: 1, bag_size: 46625\n",
      "batch 339, loss: 0.0068, instance_loss: 0.0060, weighted_loss: 0.0065, label: 0, bag_size: 11375\n",
      "batch 359, loss: 0.0003, instance_loss: 0.0021, weighted_loss: 0.0009, label: 0, bag_size: 67411\n",
      "batch 379, loss: 0.0629, instance_loss: 0.0021, weighted_loss: 0.0447, label: 1, bag_size: 85747\n",
      "batch 399, loss: 0.0350, instance_loss: 0.0047, weighted_loss: 0.0259, label: 2, bag_size: 56310\n",
      "batch 419, loss: 0.0042, instance_loss: 0.0024, weighted_loss: 0.0037, label: 1, bag_size: 10520\n",
      "batch 439, loss: 0.1516, instance_loss: 0.0260, weighted_loss: 0.1139, label: 2, bag_size: 50246\n",
      "batch 459, loss: 0.5648, instance_loss: 0.0002, weighted_loss: 0.3954, label: 1, bag_size: 49548\n",
      "batch 479, loss: 0.0002, instance_loss: 0.0006, weighted_loss: 0.0003, label: 0, bag_size: 75475\n",
      "batch 499, loss: 0.0050, instance_loss: 0.0073, weighted_loss: 0.0057, label: 2, bag_size: 59224\n",
      "batch 519, loss: 0.0012, instance_loss: 0.0183, weighted_loss: 0.0063, label: 1, bag_size: 17958\n",
      "batch 539, loss: 0.0012, instance_loss: 0.0008, weighted_loss: 0.0011, label: 0, bag_size: 80189\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9986313868613139: correct 4378/4384\n",
      "Epoch: 7, train_loss: 0.1550, train_clustering_loss:  0.0109, train_error: 0.0566\n",
      "class 0: acc 0.9166666666666666, correct 154/168\n",
      "class 1: acc 0.9408602150537635, correct 175/186\n",
      "class 2: acc 0.9690721649484536, correct 188/194\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4672, val_error: 0.1311, auc: 0.9701\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8611111111111112, correct 31/36\n",
      "class 1: acc 0.8333333333333334, correct 15/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "Validation loss decreased (0.967967 --> 0.970101).  Saving model ...\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0065, instance_loss: 0.0055, weighted_loss: 0.0062, label: 1, bag_size: 49463\n",
      "batch 39, loss: 0.0601, instance_loss: 0.0050, weighted_loss: 0.0435, label: 2, bag_size: 54265\n",
      "batch 59, loss: 0.0129, instance_loss: 0.0074, weighted_loss: 0.0113, label: 2, bag_size: 66023\n",
      "batch 79, loss: 0.0011, instance_loss: 0.0086, weighted_loss: 0.0033, label: 2, bag_size: 30615\n",
      "batch 99, loss: 0.0029, instance_loss: 0.0053, weighted_loss: 0.0036, label: 1, bag_size: 8400\n",
      "batch 119, loss: 0.6040, instance_loss: 0.0057, weighted_loss: 0.4245, label: 1, bag_size: 67478\n",
      "batch 139, loss: 0.1359, instance_loss: 0.0310, weighted_loss: 0.1044, label: 1, bag_size: 5928\n",
      "batch 159, loss: 0.0115, instance_loss: 0.0082, weighted_loss: 0.0105, label: 2, bag_size: 40315\n",
      "batch 179, loss: 0.0163, instance_loss: 0.0035, weighted_loss: 0.0124, label: 2, bag_size: 36978\n",
      "batch 199, loss: 0.1441, instance_loss: 0.0035, weighted_loss: 0.1019, label: 0, bag_size: 74863\n",
      "batch 219, loss: 0.0651, instance_loss: 0.0075, weighted_loss: 0.0478, label: 1, bag_size: 20374\n",
      "batch 239, loss: 0.2675, instance_loss: 0.0019, weighted_loss: 0.1878, label: 0, bag_size: 51028\n",
      "batch 259, loss: 0.0859, instance_loss: 0.0026, weighted_loss: 0.0609, label: 1, bag_size: 113716\n",
      "batch 279, loss: 0.0012, instance_loss: 0.0182, weighted_loss: 0.0063, label: 2, bag_size: 68187\n",
      "batch 299, loss: 0.0374, instance_loss: 0.0008, weighted_loss: 0.0264, label: 1, bag_size: 112356\n",
      "batch 319, loss: 0.0541, instance_loss: 0.0082, weighted_loss: 0.0404, label: 0, bag_size: 74183\n",
      "batch 339, loss: 0.1302, instance_loss: 0.0005, weighted_loss: 0.0913, label: 1, bag_size: 49548\n",
      "batch 359, loss: 0.0099, instance_loss: 0.0029, weighted_loss: 0.0078, label: 0, bag_size: 78549\n",
      "batch 379, loss: 0.3431, instance_loss: 0.0020, weighted_loss: 0.2408, label: 1, bag_size: 43142\n",
      "batch 399, loss: 0.8195, instance_loss: 0.0044, weighted_loss: 0.5750, label: 2, bag_size: 23841\n",
      "batch 419, loss: 0.0011, instance_loss: 0.0008, weighted_loss: 0.0010, label: 2, bag_size: 72966\n",
      "batch 439, loss: 0.0170, instance_loss: 0.0064, weighted_loss: 0.0138, label: 2, bag_size: 54265\n",
      "batch 459, loss: 0.0102, instance_loss: 0.0040, weighted_loss: 0.0083, label: 1, bag_size: 67172\n",
      "batch 479, loss: 0.0005, instance_loss: 0.0151, weighted_loss: 0.0049, label: 2, bag_size: 64488\n",
      "batch 499, loss: 0.0318, instance_loss: 0.0011, weighted_loss: 0.0226, label: 1, bag_size: 67172\n",
      "batch 519, loss: 0.0117, instance_loss: 0.0000, weighted_loss: 0.0082, label: 2, bag_size: 103823\n",
      "batch 539, loss: 0.0015, instance_loss: 0.0017, weighted_loss: 0.0015, label: 0, bag_size: 43867\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 0.9995437956204379: correct 4382/4384\n",
      "Epoch: 8, train_loss: 0.1603, train_clustering_loss:  0.0072, train_error: 0.0602\n",
      "class 0: acc 0.9116022099447514, correct 165/181\n",
      "class 1: acc 0.9265536723163842, correct 164/177\n",
      "class 2: acc 0.9789473684210527, correct 186/190\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4706, val_error: 0.1311, auc: 0.9677\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8333333333333334, correct 30/36\n",
      "class 1: acc 0.8888888888888888, correct 16/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "EarlyStopping counter: 1 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 1.0268, instance_loss: 0.0104, weighted_loss: 0.7219, label: 0, bag_size: 34677\n",
      "batch 39, loss: 0.0146, instance_loss: 0.0012, weighted_loss: 0.0106, label: 2, bag_size: 72015\n",
      "batch 59, loss: 0.2834, instance_loss: 0.0183, weighted_loss: 0.2038, label: 0, bag_size: 28029\n",
      "batch 79, loss: 0.0655, instance_loss: 0.0142, weighted_loss: 0.0501, label: 2, bag_size: 13963\n",
      "batch 99, loss: 0.0141, instance_loss: 0.0004, weighted_loss: 0.0100, label: 1, bag_size: 75744\n",
      "batch 119, loss: 0.0010, instance_loss: 0.0072, weighted_loss: 0.0029, label: 2, bag_size: 84436\n",
      "batch 139, loss: 1.3955, instance_loss: 0.0005, weighted_loss: 0.9770, label: 0, bag_size: 90911\n",
      "batch 159, loss: 0.0082, instance_loss: 0.0022, weighted_loss: 0.0064, label: 0, bag_size: 70854\n",
      "batch 179, loss: 0.0196, instance_loss: 0.0019, weighted_loss: 0.0142, label: 0, bag_size: 53188\n",
      "batch 199, loss: 0.0297, instance_loss: 0.0274, weighted_loss: 0.0290, label: 1, bag_size: 12022\n",
      "batch 219, loss: 0.3219, instance_loss: 0.0025, weighted_loss: 0.2261, label: 1, bag_size: 35236\n",
      "batch 239, loss: 0.0033, instance_loss: 0.0038, weighted_loss: 0.0035, label: 2, bag_size: 72686\n",
      "batch 259, loss: 1.3465, instance_loss: 0.0180, weighted_loss: 0.9479, label: 1, bag_size: 22039\n",
      "batch 279, loss: 0.0021, instance_loss: 0.0015, weighted_loss: 0.0019, label: 1, bag_size: 10935\n",
      "batch 299, loss: 0.3865, instance_loss: 0.0029, weighted_loss: 0.2714, label: 1, bag_size: 43142\n",
      "batch 319, loss: 0.1140, instance_loss: 0.0004, weighted_loss: 0.0799, label: 2, bag_size: 91267\n",
      "batch 339, loss: 0.0178, instance_loss: 0.0081, weighted_loss: 0.0149, label: 1, bag_size: 47660\n",
      "batch 359, loss: 0.3353, instance_loss: 0.0011, weighted_loss: 0.2350, label: 2, bag_size: 16282\n",
      "batch 379, loss: 0.3497, instance_loss: 0.0337, weighted_loss: 0.2549, label: 0, bag_size: 4963\n",
      "batch 399, loss: 0.0089, instance_loss: 0.0038, weighted_loss: 0.0074, label: 0, bag_size: 80774\n",
      "batch 419, loss: 3.9904, instance_loss: 0.0005, weighted_loss: 2.7934, label: 1, bag_size: 48985\n",
      "batch 439, loss: 0.1705, instance_loss: 0.0002, weighted_loss: 0.1194, label: 2, bag_size: 72015\n",
      "batch 459, loss: 0.0100, instance_loss: 0.0089, weighted_loss: 0.0096, label: 2, bag_size: 39234\n",
      "batch 479, loss: 0.0027, instance_loss: 0.0013, weighted_loss: 0.0023, label: 2, bag_size: 46661\n",
      "batch 499, loss: 0.2291, instance_loss: 0.0024, weighted_loss: 0.1611, label: 1, bag_size: 66144\n",
      "batch 519, loss: 0.0094, instance_loss: 0.0005, weighted_loss: 0.0067, label: 0, bag_size: 72078\n",
      "batch 539, loss: 0.0139, instance_loss: 0.0030, weighted_loss: 0.0107, label: 0, bag_size: 43883\n",
      "\n",
      "\n",
      "class 0 clustering acc 1.0: correct 4384/4384\n",
      "class 1 clustering acc 1.0: correct 4384/4384\n",
      "Epoch: 9, train_loss: 0.1916, train_clustering_loss:  0.0057, train_error: 0.0675\n",
      "class 0: acc 0.8932584269662921, correct 159/178\n",
      "class 1: acc 0.9333333333333333, correct 182/195\n",
      "class 2: acc 0.9714285714285714, correct 170/175\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Val Set, val_loss: 0.4253, val_error: 0.1311, auc: 0.9661\n",
      "class 0 clustering acc 1.0: correct 488/488\n",
      "class 1 clustering acc 1.0: correct 488/488\n",
      "class 0: acc 0.8611111111111112, correct 31/36\n",
      "class 1: acc 0.8333333333333334, correct 15/18\n",
      "class 2: acc 1.0, correct 7/7\n",
      "EarlyStopping counter: 2 out of 10\n",
      "\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n",
      "/dssg/home/acct-medftn/medftn/BEPT/Model/benchMark/HIPT/2-Weakly-Supervised-Subtyping/utils/utils.py:36: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  img = torch.cat([torch.tensor(item[0]) for item in batch], dim = 0)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "batch 19, loss: 0.0145, instance_loss: 0.0023, weighted_loss: 0.0108, label: 1, bag_size: 29399\n",
      "batch 39, loss: 0.0723, instance_loss: 0.0023, weighted_loss: 0.0513, label: 1, bag_size: 7532\n",
      "batch 59, loss: 0.2895, instance_loss: 0.0036, weighted_loss: 0.2037, label: 2, bag_size: 87135\n",
      "batch 79, loss: 0.0636, instance_loss: 0.0011, weighted_loss: 0.0449, label: 1, bag_size: 28349\n",
      "batch 99, loss: 0.0840, instance_loss: 0.0014, weighted_loss: 0.0592, label: 1, bag_size: 39770\n",
      "batch 119, loss: 0.3450, instance_loss: 0.0022, weighted_loss: 0.2422, label: 1, bag_size: 85747\n",
      "batch 139, loss: 0.0025, instance_loss: 0.0016, weighted_loss: 0.0022, label: 1, bag_size: 61135\n",
      "batch 159, loss: 0.6581, instance_loss: 0.0123, weighted_loss: 0.4644, label: 1, bag_size: 35236\n",
      "batch 179, loss: 0.0002, instance_loss: 0.0101, weighted_loss: 0.0032, label: 2, bag_size: 30615\n",
      "batch 199, loss: 0.0176, instance_loss: 0.0031, weighted_loss: 0.0133, label: 2, bag_size: 91267\n",
      "batch 219, loss: 0.0180, instance_loss: 0.0013, weighted_loss: 0.0130, label: 1, bag_size: 96881\n",
      "batch 239, loss: 0.0004, instance_loss: 0.0048, weighted_loss: 0.0017, label: 2, bag_size: 30615\n",
      "batch 259, loss: 0.0099, instance_loss: 0.0021, weighted_loss: 0.0076, label: 0, bag_size: 54937\n",
      "batch 279, loss: 0.1067, instance_loss: 0.0012, weighted_loss: 0.0751, label: 2, bag_size: 58111\n",
      "batch 299, loss: 0.2341, instance_loss: 0.0019, weighted_loss: 0.1644, label: 0, bag_size: 108807\n",
      "batch 319, loss: 0.0825, instance_loss: 0.0127, weighted_loss: 0.0616, label: 0, bag_size: 16184\n",
      "batch 339, loss: 0.1376, instance_loss: 0.0023, weighted_loss: 0.0970, label: 2, bag_size: 59224\n",
      "batch 359, loss: 0.0082, instance_loss: 0.0025, weighted_loss: 0.0065, label: 0, bag_size: 90654\n"
     ]
    }
   ],
   "source": [
    "%run ./script_10fold/subtyping/kidney/main_10Fold_beph_0.75.py --data_root_dir ./kidney/image/ \\\n",
    "--model_type clam_sb \\\n",
    "--task tcga_kidney_subtype \\\n",
    "--prop 1 \\\n",
    "--max_epoch 20 \\\n",
    "--k 10 \\\n",
    "--k_start 0 \\\n",
    "--lr 5e-5 \\\n",
    "--results_dir ./results_subtyping/p75/kidney \\\n",
    "--early_stopping\n",
    "# --pretrain_4k vit4k_xs_dino\n",
    "# 1e-4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2ba1742d-7914-428e-91f7-75145eb286db",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "%run ./script_5fold/main_5Fold_beph_survival_CRC.py --data_root_dir ./kidney/image/ \\\n",
    "--model_type clam_sb \\\n",
    "--task tcga_crc_subtype \\\n",
    "--prop 1 \\\n",
    "--max_epoch 20 \\\n",
    "--k 5 \\\n",
    "--k_start 0 \\\n",
    "--lr 2e-3 \\\n",
    "--seed 123 \\\n",
    "--results_dir ./results_survival/tcga_crc_subtype \\\n",
    "--early_stopping\n",
    "# --pretrain_4k vit4k_xs_dino\n",
    "# NOTE(review): --data_root_dir points to ./kidney/image/ while the script/task are CRC (tcga_crc_subtype) -- looks copy-pasted from the kidney cell; confirm the CRC feature directory\n",
    "# 1e-4"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "mmselfsup_yzc",
   "language": "python",
   "name": "mmselfsup_yzc"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.17"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
