{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "a8f9ba19",
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "sys.path.append('/data/run01/sczc619/LML/MetaTSNE')\n",
    "from bsne_utils import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "b2c7574c",
   "metadata": {},
   "outputs": [],
   "source": [
    "class GraphTransformerLayer(nn.Module):\n",
    "    def __init__(self, d_model=256, nhead=8, dim_feedforward=768, dropout=0.1):\n",
    "        super().__init__()\n",
    "        self.linear1 = nn.Linear(d_model, dim_feedforward)\n",
    "        self.self_attn = nn.MultiheadAttention(\n",
    "            embed_dim=d_model,\n",
    "            num_heads=nhead,\n",
    "            dropout=dropout,\n",
    "            batch_first=True\n",
    "        )\n",
    "        self.dropout = nn.Dropout(dropout)\n",
    "        self.linear2 = nn.Linear(dim_feedforward, d_model)\n",
    "        self.norm1 = nn.LayerNorm(d_model)\n",
    "        self.norm2 = nn.LayerNorm(d_model)\n",
    "        self.dropout1 = nn.Dropout(dropout)\n",
    "        self.dropout2 = nn.Dropout(dropout)\n",
    "        self.activation = nn.ReLU()\n",
    "\n",
    "    def forward(self, src, adj_mask=None):\n",
    "        attn_mask = self._create_attention_mask(adj_mask)\n",
    "\n",
    "        src2 = self.norm1(src)\n",
    "        src2, attn_weights = self.self_attn(\n",
    "            src2, src2, src2,\n",
    "            attn_mask=attn_mask\n",
    "        )\n",
    "        src = src + self.dropout1(src2)\n",
    "\n",
    "        src2 = self.norm2(src)\n",
    "        src2 = self.linear2(self.dropout(self.activation(self.linear1(src2))))\n",
    "        src = src + self.dropout2(src2)\n",
    "        return src\n",
    "\n",
    "    def _create_attention_mask(self, adj_mask):\n",
    "        if adj_mask is None:\n",
    "            return None\n",
    "        mask = (adj_mask == 0).bool()\n",
    "        mask = mask.repeat(self.self_attn.num_heads, 1, 1)\n",
    "        return mask\n",
    "\n",
    "\n",
    "class multi_HOGRL_Transformer(nn.Module):\n",
    "    def __init__(self, in_feat, out_feat, relation_nums=3, d_model=256,\n",
    "                 nhead=8, num_layers=5, dim_feedforward=768,\n",
    "                 drop_rate=0.6, layers_tree=2, tsne_weight=0.1):\n",
    "        super().__init__()\n",
    "        self.relation_nums = relation_nums\n",
    "        self.d_model = d_model\n",
    "\n",
    "        self.feature_proj = nn.Linear(in_feat, d_model)\n",
    "\n",
    "        self.transformer_layers = nn.ModuleList([\n",
    "            nn.ModuleList([GraphTransformerLayer(\n",
    "                d_model=d_model,\n",
    "                nhead=nhead,\n",
    "                dim_feedforward=dim_feedforward,\n",
    "                dropout=drop_rate\n",
    "            ) for _ in range(num_layers)])\n",
    "            for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        self.tree_projs = nn.ModuleList([\n",
    "            nn.ModuleList([nn.Sequential(\n",
    "                nn.Linear(d_model, dim_feedforward),\n",
    "                nn.ReLU(),\n",
    "                nn.Linear(dim_feedforward, d_model)\n",
    "            ) for _ in range(layers_tree)])\n",
    "            for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        self.gating_networks = nn.ModuleList([\n",
    "            nn.ModuleList([nn.Linear(d_model, 1)\n",
    "                           for _ in range(layers_tree)])\n",
    "            for _ in range(relation_nums)\n",
    "        ])\n",
    "\n",
    "        self.classifier = nn.Sequential(\n",
    "            nn.Linear(relation_nums * d_model, 512),\n",
    "            nn.ReLU(),\n",
    "            nn.Dropout(drop_rate),\n",
    "            nn.Linear(512, out_feat)\n",
    "        )\n",
    "\n",
    "        self.tsne_weight = tsne_weight\n",
    "\n",
    "    def forward(self, x, edge_indexs, sub_nodes=None):\n",
    "        if sub_nodes is not None:\n",
    "            x = x[sub_nodes]\n",
    "\n",
    "        x = self.feature_proj(x)\n",
    "\n",
    "        relation_outputs = []\n",
    "        for rel_idx in range(self.relation_nums):\n",
    "            edge_index = edge_indexs[rel_idx][0]\n",
    "            adj_matrix = to_dense_adj(edge_index, max_num_nodes=x.size(0))[0]\n",
    "            tree_indices = edge_indexs[rel_idx][1]\n",
    "\n",
    "            h = x\n",
    "            for layer in self.transformer_layers[rel_idx]:\n",
    "                h = layer(h, adj_matrix.bool())\n",
    "\n",
    "            tree_features = []\n",
    "            for tree_idx, tree_edges in enumerate(tree_indices):\n",
    "                tree_adj = to_dense_adj(tree_edges, max_num_nodes=x.size(0))[0]\n",
    "                h_tree = x\n",
    "                for layer in self.tree_projs[rel_idx][tree_idx]:\n",
    "                    h_tree = layer(h_tree)\n",
    "                h_tree = self.transformer_layers[rel_idx][-1](h_tree, tree_adj.bool())\n",
    "                tree_features.append(h_tree)\n",
    "\n",
    "            gates = torch.stack([\n",
    "                self.gating_networks[rel_idx][i](feat)\n",
    "                for i, feat in enumerate(tree_features)\n",
    "            ], dim=-1)\n",
    "            alpha = F.softmax(gates, dim=-1)\n",
    "            fused_tree = sum([feat * alpha[:, :, i]\n",
    "                              for i, feat in enumerate(tree_features)])\n",
    "\n",
    "            relation_output = h + fused_tree\n",
    "            relation_outputs.append(relation_output)\n",
    "\n",
    "        combined = torch.cat(relation_outputs, dim=-1)\n",
    "\n",
    "        logits = self.classifier(combined)\n",
    "        logits = F.log_softmax(logits, dim=-1)\n",
    "\n",
    "        tsne_feats = torch.stack(relation_outputs, dim=1).mean(dim=1)\n",
    "\n",
    "        return logits, tsne_feats"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "288f6799",
   "metadata": {},
   "outputs": [],
   "source": [
    "def test(idx_eval, y_eval, model, feat_data, edge_indexs, device):\n",
    "    model.eval()\n",
    "    with torch.no_grad():\n",
    "        logits, _ = model(feat_data.to(device), edge_indexs, sub_nodes=None)\n",
    "        x_softmax = torch.exp(logits).cpu().detach()\n",
    "        positive_class_probs = x_softmax[:, 1].numpy()[np.array(idx_eval)]\n",
    "        auc_score = roc_auc_score(np.array(y_eval), np.array(positive_class_probs))\n",
    "        ap_score = average_precision_score(np.array(y_eval), np.array(positive_class_probs))\n",
    "        label_prob = (np.array(positive_class_probs) >= 0.5).astype(int)\n",
    "        f1_score_val = f1_score(np.array(y_eval), label_prob, average='macro')\n",
    "        g_mean = calculate_g_mean(np.array(y_eval), label_prob)\n",
    "\n",
    "    return auc_score, ap_score, f1_score_val, g_mean\n",
    "\n",
    "\n",
    "def calculate_tsne_loss(emb_p, emb_u, dist_sub_p, dist_matrix, batch_p_global, batch_u_global, temperature=1,\n",
    "                        eps=1e-12):\n",
    "\n",
    "    device = emb_p.device\n",
    "    batch_size = emb_p.size(0)\n",
    "\n",
    "    # --- 局部项：基于B_p子图 ---\n",
    "    # 生成子图局部索引映射表\n",
    "    subnode_to_local = {node: i for i, node in enumerate(batch_p_global)}\n",
    "    local_indices = [subnode_to_local[node] for node in batch_p_global]\n",
    "\n",
    "    # 提取局部距离矩阵\n",
    "    dist_p = dist_sub_p[local_indices][:, local_indices]\n",
    "\n",
    "    # 计算P\n",
    "    # P = torch.exp(-dist_p ** 2)\n",
    "    P = (1.0 + dist_p ** 2) ** -1\n",
    "    P.fill_diagonal_(0)\n",
    "    P = (P + P.T) / 2  # 对称化\n",
    "    P = P / (P.sum(dim=1, keepdim=True) + eps)\n",
    "    # P = P / P.sum()\n",
    "    P = torch.clamp(P, min=eps)\n",
    "\n",
    "    # 计算Q\n",
    "    # pairwise_dist = torch.cdist(emb_p, emb_p)\n",
    "    pairwise_dist = torch.cdist(emb_p, emb_p, p=2)\n",
    "    Q = (1.0 + pairwise_dist ** 2 / temperature) ** -1\n",
    "    Q.fill_diagonal_(0)\n",
    "    Q = (Q + Q.T) / 2\n",
    "    Q = Q / (Q.sum(dim=1, keepdim=True) + eps)\n",
    "    # Q = Q / Q.sum()\n",
    "    Q = torch.clamp(Q, min=eps)\n",
    "\n",
    "\n",
    "    # 局部损失：KL散度\n",
    "    loss_local = (torch.log(P) - torch.log(Q)).mean()\n",
    "\n",
    "    # --- 全局项 ---\n",
    "    # # 计算B_p到B_u的距离\n",
    "    # dist_pu = torch.cdist(emb_p, emb_u)\n",
    "    #\n",
    "    # # 分子：sum_{x∈B_u} e^{-dθ(xi,x)}\n",
    "    # d_bu = torch.exp(-dist_pu).sum(dim=1)\n",
    "    #\n",
    "    # # 分母：sum_{x∈B_p} e^{-dθ(xi,x)}\n",
    "    # d_bp = torch.exp(-pairwise_dist).sum(dim=1) + eps\n",
    "    #\n",
    "    # # 计算k_Bp（使用全局距离矩阵）\n",
    "    #\n",
    "    # p_xi_full = torch.exp(-dist_matrix).sum(dim=1)  # [num_nodes]\n",
    "    # dist_sub_p = dist_matrix[batch_p_global][:, batch_p_global]\n",
    "    # sum_p_xi = torch.exp(-dist_sub_p).sum(dim=1)\n",
    "    # k_Bp = (sum_p_xi / p_xi_full[batch_p_global]) * (dist_matrix.shape[0] / batch_size)\n",
    "    #\n",
    "    #\n",
    "    # loss_global = torch.log(k_Bp * d_bu / d_bp + eps).mean()\n",
    "\n",
    "\n",
    "    # 计算emb_p到emb_u的距离（平方欧氏距离）\n",
    "    dist_pu_sq = torch.cdist(emb_p, emb_u, p=2) ** 2\n",
    "    d_bu = (1.0 + dist_pu_sq / temperature) ** -1\n",
    "    d_bu = d_bu.sum(dim=1)\n",
    "\n",
    "    pairwise_dist_sq = pairwise_dist ** 2\n",
    "    d_bp = (1.0 + pairwise_dist_sq / temperature) ** -1\n",
    "    d_bp = d_bp.sum(dim=1) + eps\n",
    "\n",
    "    # 计算k_Bp（保持原逻辑）\n",
    "    p_xi_full = (1.0 + dist_matrix ** 2) ** -1 \n",
    "    sum_p_xi = p_xi_full[batch_p_global][:, batch_p_global].sum(dim=1)\n",
    "    k_Bp = (sum_p_xi / p_xi_full[batch_p_global].sum(dim=1)) * (dist_matrix.shape[0] / batch_size)\n",
    "\n",
    "    ratio = (k_Bp.unsqueeze(1) * d_bu) / d_bp.unsqueeze(1)\n",
    "    loss_global = torch.log(ratio.clamp(min=eps)).mean()\n",
    "\n",
    "#     print(len(k_Bp),f\"k_Bp: {k_Bp}\")\n",
    "#     print(f\"d_bu mean: {d_bu.mean().item()}, d_bu max: {d_bu.max().item()}, d_bu min: {d_bu.min().item()}\")\n",
    "#     print(f\"d_bp mean: {d_bp.mean().item()}, d_bp max: {d_bp.max().item()}, d_bp min: {d_bp.min().item()}\")\n",
    "\n",
    "\n",
    "#     print(f\"P mean: {P.mean().item()}, P max: {P.max().item()}, P min: {P.min().item()}\")\n",
    "#     print(f\"Q mean: {Q.mean().item()}, Q max: {Q.max().item()}, Q min: {Q.min().item()}\")\n",
    "#     print(f\"local_loss: {loss_local.item()},global_loss: {loss_global.item()}\")\n",
    "\n",
    "    return loss_local + loss_global"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "911d1740",
   "metadata": {},
   "outputs": [],
   "source": [
    "# bsne算法\n",
    "def bsne_main(args, model_config):\n",
    "    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "    # device = torch.device('cpu')\n",
    "    print(device)\n",
    "\n",
    "    timestamp = time.strftime(\"%Y%m%d-%H%M%S\")\n",
    "    writer = SummaryWriter(f'runs/{args[\"dataset\"]}_{timestamp}')\n",
    "\n",
    "    print('loading data...')\n",
    "    prefix = \"/data/run01/sczc619/LML/MetaTSNE/data/\"\n",
    "    edge_indexs, feat_data, labels = load_data(args['dataset'], args['layers_tree'], prefix)\n",
    "\n",
    "    np.random.seed(args['seed'])\n",
    "    rd.seed(args['seed'])\n",
    "\n",
    "    if args['dataset'] == 'yelp':\n",
    "        index = list(range(len(labels)))\n",
    "        idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels, stratify=labels,\n",
    "                                                                        test_size=args['test_size'], random_state=2,\n",
    "                                                                        shuffle=True)\n",
    "        idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                              stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                              random_state=2, shuffle=True)\n",
    "        dist_path = os.path.join(prefix, \"Yelp_shortest_distance.pkl\")\n",
    "    elif args['dataset'] == 'amazon':\n",
    "        index = list(range(3305, len(labels)))\n",
    "        idx_train_val, idx_test, y_train_val, y_test = train_test_split(index, labels[3305:],\n",
    "                                                                        stratify=labels[3305:],\n",
    "                                                                        test_size=args['test_size'],\n",
    "                                                                        random_state=2, shuffle=True)\n",
    "        idx_train, idx_val, y_train, y_val = train_test_split(idx_train_val, y_train_val,\n",
    "                                                              stratify=y_train_val, test_size=args['val_size'],\n",
    "                                                              random_state=2, shuffle=True)\n",
    "        dist_path = os.path.join(prefix, \"Amazon_shortest_distance.pkl\")\n",
    "\n",
    "    with open(dist_path, 'rb') as f:\n",
    "        dist_data = pickle.load(f)\n",
    "        dist_matrix = torch.tensor(dist_data['dist_matrix']).to(device)\n",
    "\n",
    "\n",
    "    adj_dict = defaultdict(list)\n",
    "    for rel in edge_indexs:\n",
    "        edge_index = rel[0].cpu().numpy()\n",
    "        for src, dst in zip(edge_index[0], edge_index[1]):\n",
    "            adj_dict[src].append(dst)\n",
    "\n",
    "    gnn_model = multi_HOGRL_Transformer(\n",
    "        in_feat=feat_data.shape[1],\n",
    "        out_feat=2,\n",
    "        relation_nums=len(edge_indexs),\n",
    "        d_model=model_config['d_model'],\n",
    "        nhead=args['num_heads'],\n",
    "        num_layers=3,\n",
    "        dim_feedforward=256,\n",
    "        drop_rate=args['drop_rate'],\n",
    "        layers_tree=args['layers_tree'],\n",
    "        tsne_weight=args['tsne_weight']\n",
    "    ).to(device)\n",
    "\n",
    "    for edge_index in edge_indexs:\n",
    "        edge_index[0] = edge_index[0].to(device)\n",
    "        edge_index[1] = [tensor.to(device) for tensor in edge_index[1]]\n",
    "    feat_data = torch.tensor(feat_data).float().to(device)\n",
    "\n",
    "\n",
    "    print(\"\\n=== Starting Pretraining ===\")\n",
    "\n",
    "    gnn_model.classifier.requires_grad_(False)\n",
    "    optimizer = torch.optim.AdamW(\n",
    "        filter(lambda p: p.requires_grad, gnn_model.parameters()),\n",
    "        lr=args['pretrain_lr'],\n",
    "        weight_decay=1e-4\n",
    "    )\n",
    "    pretrain_best_loss = float('inf')\n",
    "    pretrain_no_improve = 0\n",
    "    pretrain_early_stop = False\n",
    "\n",
    "    for epoch in range(args['pretrain_epochs']):\n",
    "        if pretrain_early_stop:\n",
    "            break\n",
    "\n",
    "        gnn_model.train()\n",
    "        optimizer.zero_grad()\n",
    "\n",
    "        # 第一次采样\n",
    "        batch_centers = rd.sample(range(feat_data.shape[0]), args['batch_size'])\n",
    "        sub_nodes_p = sample_subgraph(batch_centers, dist_matrix, args['sample_size'])\n",
    "\n",
    "        # 第二次采样\n",
    "        batch_u_global = np.random.choice(feat_data.shape[0], size=len(sub_nodes_p), replace=False)\n",
    "\n",
    "        # 生成B_p嵌入\n",
    "        feat_sub_p = feat_data[sub_nodes_p]\n",
    "        _, embeddings_p = gnn_model(feat_sub_p, edge_indexs, sub_nodes=None)\n",
    "\n",
    "        # 生成B_u嵌入\n",
    "        feat_u = feat_data[batch_u_global]\n",
    "        with torch.no_grad():\n",
    "            _, embeddings_u = gnn_model(feat_u, edge_indexs, sub_nodes=None)\n",
    "\n",
    "        # 获取B_p子图距离矩阵\n",
    "        dist_sub_p = dist_matrix[sub_nodes_p][:, sub_nodes_p]\n",
    "\n",
    "        # 计算损失\n",
    "        tsne_loss = calculate_tsne_loss(\n",
    "            embeddings_p,  # 子图嵌入\n",
    "            embeddings_u,  # 全局采样嵌入\n",
    "            dist_sub_p,  # B_p子图距离\n",
    "            dist_matrix,  # 全图距离\n",
    "            sub_nodes_p,  # B_p全局索引\n",
    "            batch_u_global,  # B_u全局索引\n",
    "            temperature=100,\n",
    "            eps=1e-10\n",
    "        ) * args['tsne_weight']\n",
    "\n",
    "\n",
    "        tsne_loss.backward()\n",
    "        \n",
    "        torch.nn.utils.clip_grad_norm_(gnn_model.parameters(), max_norm=1.0)\n",
    "        \n",
    "        optimizer.step()\n",
    "\n",
    "        # print(f\"Epoch {epoch}: Loss={tsne_loss.item():.4f}\")\n",
    "\n",
    "        if tsne_loss.item() < pretrain_best_loss:\n",
    "            pretrain_best_loss = tsne_loss.item()\n",
    "            pretrain_no_improve = 0\n",
    "        else:\n",
    "            pretrain_no_improve += 1\n",
    "\n",
    "        if pretrain_no_improve >= args['pretrain_patience']:\n",
    "            print(f\"Pretrain early stopping at epoch {epoch}\")\n",
    "            pretrain_early_stop = True\n",
    "\n",
    "        writer.add_scalar('Pretrain/TSNE_Loss', tsne_loss.item(), epoch)\n",
    "\n",
    "        if epoch % 5 == 0:\n",
    "            print(f'Pretrain Epoch: {epoch:03d}, TSNE Loss: {tsne_loss.item():.4f}')\n",
    "\n",
    "    print(\"\\n=== Starting Fine-tuning ===\")\n",
    "    gnn_model.classifier.requires_grad_(True)\n",
    "    optimizer = torch.optim.AdamW(\n",
    "        filter(lambda p: p.requires_grad, gnn_model.parameters()),\n",
    "        lr=args['finetune_lr'],\n",
    "        weight_decay=1e-4\n",
    "    )\n",
    "    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(\n",
    "        optimizer, mode='max', factor=0.5, patience=10, verbose=True\n",
    "    )\n",
    "\n",
    "    best_val_auc = 0.0\n",
    "    best_model_state = None\n",
    "    train_pos, train_neg = pos_neg_split(idx_train, y_train)\n",
    "\n",
    "    no_improve_epochs = 0\n",
    "    early_stop = False\n",
    "\n",
    "    for epoch in range(args['num_epochs']):\n",
    "        if early_stop:\n",
    "            break\n",
    "\n",
    "        gnn_model.train()\n",
    "        total_loss = 0.0\n",
    "\n",
    "        batch_centers = rd.sample(train_pos + train_neg, args['batch_size'])\n",
    "        sub_nodes = sample_subgraph(batch_centers, dist_matrix, args['sample_size'])\n",
    "        batch_mask = [i for i, node in enumerate(sub_nodes) if node in batch_centers]\n",
    "\n",
    "        feat_sub = feat_data[sub_nodes]\n",
    "        labels_sub = labels[sub_nodes]\n",
    "\n",
    "        for _ in range(len(sub_nodes) // args['batch_size']):\n",
    "            optimizer.zero_grad()\n",
    "\n",
    "            out, _ = gnn_model(feat_sub, edge_indexs, sub_nodes=None)\n",
    "            cls_loss = F.nll_loss(out[batch_mask], torch.LongTensor(labels_sub[batch_mask]).to(device))\n",
    "\n",
    "            cls_loss.backward()\n",
    "            optimizer.step()\n",
    "\n",
    "            total_loss += cls_loss.item()\n",
    "\n",
    "        avg_loss = total_loss / (len(sub_nodes) // args['batch_size'])\n",
    "        writer.add_scalar('FineTune/Train_Loss', avg_loss, epoch)\n",
    "\n",
    "        if epoch % 5 == 0:\n",
    "            val_auc, val_ap, val_f1, val_g_mean = test(idx_val, y_val, gnn_model, feat_data, edge_indexs, device)\n",
    "\n",
    "            writer.add_scalar('Validation/AUC', val_auc, epoch)\n",
    "            writer.add_scalar('Validation/F1', val_f1, epoch)\n",
    "            writer.add_scalar('Validation/GMean', val_g_mean, epoch)\n",
    "\n",
    "            print(f'Epoch: {epoch:03d} | Loss: {avg_loss:.4f} | Val AUC: {val_auc:.4f} | Val F1: {val_f1:.4f}')\n",
    "\n",
    "            scheduler.step(val_auc)\n",
    "\n",
    "            if val_auc > best_val_auc:\n",
    "                best_val_auc = val_auc\n",
    "                no_improve_epochs = 0\n",
    "                best_model_state = copy.deepcopy(gnn_model.state_dict())\n",
    "            else:\n",
    "                no_improve_epochs += 1\n",
    "\n",
    "            if no_improve_epochs >= args['patience']:\n",
    "                print(f\"Early stopping at epoch {epoch}\")\n",
    "                early_stop = True\n",
    "\n",
    "    gnn_model.load_state_dict(best_model_state)\n",
    "    test_auc, test_ap, test_f1, test_g_mean = test(idx_test, y_test, gnn_model, feat_data, edge_indexs, device)\n",
    "    print(f'\\n=== Final Test Results ===')\n",
    "    print(f'Test AUC: {test_auc:.4f} | Test AP: {test_ap:.4f} | Test F1: {test_f1:.4f} | G-mean: {test_g_mean:.4f}')\n",
    "    writer.close()\n",
    "    \n",
    "    \n",
    "    return test_auc, test_ap, test_f1, test_g_mean\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "f154023f",
   "metadata": {},
   "outputs": [],
   "source": [
    "def count_parameters(model):\n",
    "    return sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
    "\n",
    "def run_scaling_experiments():\n",
    "    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "    print(f\"Using device: {device}\")\n",
    "\n",
    "    # 定义不同规模的模型配置\n",
    "    configs = [\n",
    "        {   # Tiny\n",
    "            'd_model': 64,\n",
    "            'num_heads': 2,\n",
    "            'num_layers': 2,\n",
    "            'dim_feedforward': 128,\n",
    "            'name': 'Tiny'\n",
    "        },\n",
    "        {   # Small\n",
    "            'd_model': 128,\n",
    "            'num_heads': 4,\n",
    "            'num_layers': 3,\n",
    "            'dim_feedforward': 256,\n",
    "            'name': 'Small'\n",
    "        },\n",
    "        {   # Medium\n",
    "            'd_model': 256,\n",
    "            'num_heads': 4,\n",
    "            'num_layers': 4,\n",
    "            'dim_feedforward': 512,\n",
    "            'name': 'Medium'\n",
    "        }\n",
    "#         ,\n",
    "#         {   # Large\n",
    "#             'd_model': 512,\n",
    "#             'num_heads': 8,\n",
    "#             'num_layers': 6,\n",
    "#             'dim_feedforward': 1024,\n",
    "#             'name': 'Large'\n",
    "#         }\n",
    "    ]\n",
    "\n",
    "    results = []\n",
    "    \n",
    "    for cfg in configs:\n",
    "        print(f\"\\n=== Running experiment for {cfg['name']} model ===\")\n",
    "        \n",
    "        # 加载数据\n",
    "        edge_indexs, feat_data, labels = load_data(\"amazon\", k=2, prefix=\"/data/run01/sczc619/LML/MetaTSNE/data/\")\n",
    "        \n",
    "        # 创建模型\n",
    "        model = multi_HOGRL_Transformer(\n",
    "            in_feat=feat_data.shape[1],\n",
    "            out_feat=2,\n",
    "            relation_nums=3,\n",
    "            d_model=cfg['d_model'],\n",
    "            nhead=cfg['num_heads'],\n",
    "            num_layers=cfg['num_layers'],\n",
    "            dim_feedforward=cfg['dim_feedforward'],\n",
    "            drop_rate=0.5,\n",
    "            layers_tree=2,\n",
    "            tsne_weight=0.3\n",
    "        ).to(device)\n",
    "        \n",
    "        # 计算参数量\n",
    "        params = count_parameters(model)\n",
    "        print(f\"Total trainable parameters: {params/1e6:.2f}M\")\n",
    "        \n",
    "        # 设置训练参数\n",
    "        experiment_args = {\n",
    "            \"dataset\": \"amazon\",\n",
    "            \"batch_size\": 64,\n",
    "            \"sample_size\": 30,\n",
    "            \"num_heads\": cfg['num_heads'],\n",
    "            \"pretrain_epochs\": 200,\n",
    "            \"pretrain_lr\": 0.001,\n",
    "            \"finetune_lr\": 0.0005,\n",
    "            \"num_epochs\": 300,\n",
    "            \"pretrain_patience\": 30,\n",
    "            \"patience\": 30,\n",
    "            \"test_size\": 0.6,\n",
    "            \"val_size\": 0.5,\n",
    "            \"seed\": 42,\n",
    "            \"layers_tree\":2,\n",
    "            \"drop_rate\": 0.5,\n",
    "            \"tsne_weight\":0.3\n",
    "        }\n",
    "        \n",
    "        # 运行实验\n",
    "        test_auc, test_ap, test_f1, test_g_mean = bsne_main(experiment_args, model_config=cfg)\n",
    "        \n",
    "        # 记录结果\n",
    "        results.append({\n",
    "            'Model': cfg['name'],\n",
    "            'Parameters(M)': params/1e6,\n",
    "            'Layers': cfg['num_layers'],\n",
    "            'HiddenDim': cfg['d_model'],\n",
    "            'AUC': test_auc,\n",
    "            'F1': test_f1,\n",
    "            'G-Mean': test_g_mean\n",
    "        })\n",
    "    \n",
    "    # 打印结果表格\n",
    "    print(\"\\n=== Scaling Experiment Results ===\")\n",
    "    print(\"{:<8} {:<12} {:<8} {:<10} {:<8} {:<8} {:<8}\".format(\n",
    "        'Model', 'Params(M)', 'Layers', 'HiddenDim', 'AUC', 'F1', 'G-Mean'))\n",
    "    for res in results:\n",
    "        print(\"{:<8} {:<12.2f} {:<8} {:<10} {:<8.4f} {:<8.4f} {:<8.4f}\".format(\n",
    "            res['Model'],\n",
    "            res['Parameters(M)'],\n",
    "            res['Layers'],\n",
    "            res['HiddenDim'],\n",
    "            res['AUC'],\n",
    "            res['F1'],\n",
    "            res['G-Mean']\n",
    "        ))\n",
    "        \n",
    "    # 可视化结果\n",
    "    plt.figure(figsize=(12, 6))\n",
    "    \n",
    "    # AUC vs Parameters\n",
    "    plt.subplot(1, 2, 1)\n",
    "    x = [r['Parameters(M)'] for r in results]\n",
    "    y = [r['AUC'] for r in results]\n",
    "    plt.plot(x, y, 'o-')\n",
    "    plt.xlabel('Parameters (Millions)')\n",
    "    plt.ylabel('Test AUC')\n",
    "    plt.title('Model Scaling: AUC vs Parameters')\n",
    "    \n",
    "    # F1 vs Parameters\n",
    "    plt.subplot(1, 2, 2)\n",
    "    y = [r['F1'] for r in results]\n",
    "    plt.plot(x, y, 'o-')\n",
    "    plt.xlabel('Parameters (Millions)')\n",
    "    plt.ylabel('Test F1')\n",
    "    plt.title('Model Scaling: F1 vs Parameters')\n",
    "    \n",
    "    plt.tight_layout()\n",
    "    plt.show()\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "0e307616",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using device: cuda\n",
      "\n",
      "=== Running experiment for Tiny model ===\n",
      "Total trainable parameters: 0.40M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7436\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.6967\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.6668\n",
      "Pretrain Epoch: 015, TSNE Loss: 0.5943\n",
      "Pretrain Epoch: 020, TSNE Loss: 0.5234\n",
      "Pretrain Epoch: 025, TSNE Loss: 0.3963\n",
      "Pretrain Epoch: 030, TSNE Loss: 0.2856\n",
      "Pretrain Epoch: 035, TSNE Loss: 0.1364\n",
      "Pretrain Epoch: 040, TSNE Loss: 0.0201\n",
      "Pretrain Epoch: 045, TSNE Loss: -0.0552\n",
      "Pretrain Epoch: 050, TSNE Loss: -0.0940\n",
      "Pretrain Epoch: 055, TSNE Loss: -0.1289\n",
      "Pretrain Epoch: 060, TSNE Loss: -0.1516\n",
      "Pretrain Epoch: 065, TSNE Loss: -0.1976\n",
      "Pretrain Epoch: 070, TSNE Loss: -0.2683\n",
      "Pretrain Epoch: 075, TSNE Loss: -0.3282\n",
      "Pretrain Epoch: 080, TSNE Loss: -0.4319\n",
      "Pretrain Epoch: 085, TSNE Loss: -0.5307\n",
      "Pretrain Epoch: 090, TSNE Loss: -0.6388\n",
      "Pretrain Epoch: 095, TSNE Loss: -0.7401\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 74.9203 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 005 | Loss: 1.4788 | Val AUC: 0.7882 | Val F1: 0.4813\n",
      "Epoch: 010 | Loss: 0.7453 | Val AUC: 0.8867 | Val F1: 0.6110\n",
      "Epoch: 015 | Loss: 0.4244 | Val AUC: 0.9211 | Val F1: 0.9134\n",
      "Epoch: 020 | Loss: 0.2253 | Val AUC: 0.9361 | Val F1: 0.7552\n",
      "Epoch: 025 | Loss: 0.2288 | Val AUC: 0.8911 | Val F1: 0.8118\n",
      "Epoch: 030 | Loss: 0.2277 | Val AUC: 0.9453 | Val F1: 0.8827\n",
      "Epoch: 035 | Loss: 0.1602 | Val AUC: 0.9143 | Val F1: 0.8570\n",
      "Epoch: 040 | Loss: 0.0174 | Val AUC: 0.9439 | Val F1: 0.8273\n",
      "Epoch: 045 | Loss: 0.1565 | Val AUC: 0.9403 | Val F1: 0.8045\n",
      "Epoch: 050 | Loss: 0.1933 | Val AUC: 0.8371 | Val F1: 0.7198\n",
      "Epoch: 055 | Loss: 0.0940 | Val AUC: 0.9519 | Val F1: 0.9140\n",
      "Epoch: 060 | Loss: 0.0232 | Val AUC: 0.9524 | Val F1: 0.8747\n",
      "Epoch: 065 | Loss: 0.0149 | Val AUC: 0.9470 | Val F1: 0.8665\n",
      "Epoch: 070 | Loss: 0.0277 | Val AUC: 0.9313 | Val F1: 0.9145\n",
      "Epoch: 075 | Loss: 0.0952 | Val AUC: 0.9509 | Val F1: 0.9249\n",
      "Epoch: 080 | Loss: 0.1035 | Val AUC: 0.9507 | Val F1: 0.8640\n",
      "Epoch: 085 | Loss: 0.0225 | Val AUC: 0.9162 | Val F1: 0.9102\n",
      "Epoch: 090 | Loss: 0.2129 | Val AUC: 0.9562 | Val F1: 0.9063\n",
      "Epoch: 095 | Loss: 0.0334 | Val AUC: 0.9592 | Val F1: 0.8973\n",
      "Epoch: 100 | Loss: 0.0542 | Val AUC: 0.9437 | Val F1: 0.9240\n",
      "Epoch: 105 | Loss: 0.1053 | Val AUC: 0.9247 | Val F1: 0.9085\n",
      "Epoch: 110 | Loss: 0.0902 | Val AUC: 0.9408 | Val F1: 0.9156\n",
      "Epoch: 115 | Loss: 0.1244 | Val AUC: 0.9373 | Val F1: 0.8487\n",
      "Epoch: 120 | Loss: 0.0845 | Val AUC: 0.9415 | Val F1: 0.9219\n",
      "Epoch: 125 | Loss: 0.1657 | Val AUC: 0.9584 | Val F1: 0.8739\n",
      "Epoch: 130 | Loss: 0.2167 | Val AUC: 0.9605 | Val F1: 0.9193\n",
      "Epoch: 135 | Loss: 0.0958 | Val AUC: 0.9428 | Val F1: 0.9084\n",
      "Epoch: 140 | Loss: 0.2003 | Val AUC: 0.9438 | Val F1: 0.9193\n",
      "Epoch: 145 | Loss: 0.2602 | Val AUC: 0.9246 | Val F1: 0.8288\n",
      "Epoch: 150 | Loss: 0.1373 | Val AUC: 0.9403 | Val F1: 0.9041\n",
      "Epoch: 155 | Loss: 0.0870 | Val AUC: 0.9498 | Val F1: 0.8640\n",
      "Epoch: 160 | Loss: 0.1726 | Val AUC: 0.9616 | Val F1: 0.9256\n",
      "Epoch: 165 | Loss: 0.0069 | Val AUC: 0.9352 | Val F1: 0.8133\n",
      "Epoch: 170 | Loss: 0.0636 | Val AUC: 0.9619 | Val F1: 0.9214\n",
      "Epoch: 175 | Loss: 0.0635 | Val AUC: 0.9408 | Val F1: 0.9129\n",
      "Epoch: 180 | Loss: 0.1278 | Val AUC: 0.9342 | Val F1: 0.9107\n",
      "Epoch: 185 | Loss: 0.0922 | Val AUC: 0.8983 | Val F1: 0.8260\n",
      "Epoch: 190 | Loss: 0.1232 | Val AUC: 0.9422 | Val F1: 0.9129\n",
      "Epoch: 195 | Loss: 0.1939 | Val AUC: 0.9546 | Val F1: 0.9150\n",
      "Epoch: 200 | Loss: 0.0189 | Val AUC: 0.9522 | Val F1: 0.9129\n",
      "Epoch: 205 | Loss: 0.0138 | Val AUC: 0.9539 | Val F1: 0.8858\n",
      "Epoch: 210 | Loss: 0.0783 | Val AUC: 0.9663 | Val F1: 0.8996\n",
      "Epoch: 215 | Loss: 0.2500 | Val AUC: 0.9516 | Val F1: 0.9107\n",
      "Epoch: 220 | Loss: 0.1758 | Val AUC: 0.9455 | Val F1: 0.8951\n",
      "Epoch: 225 | Loss: 0.3172 | Val AUC: 0.9537 | Val F1: 0.9107\n",
      "Epoch: 230 | Loss: 0.2096 | Val AUC: 0.9331 | Val F1: 0.4751\n",
      "Epoch: 235 | Loss: 0.0311 | Val AUC: 0.9622 | Val F1: 0.4751\n",
      "Epoch: 240 | Loss: 0.2225 | Val AUC: 0.9280 | Val F1: 0.9107\n",
      "Epoch: 245 | Loss: 0.0640 | Val AUC: 0.9382 | Val F1: 0.7047\n",
      "Epoch: 250 | Loss: 0.0587 | Val AUC: 0.9561 | Val F1: 0.9079\n",
      "Epoch: 255 | Loss: 0.1127 | Val AUC: 0.9641 | Val F1: 0.9155\n",
      "Epoch: 260 | Loss: 0.3023 | Val AUC: 0.9514 | Val F1: 0.4751\n",
      "Epoch: 265 | Loss: 0.1696 | Val AUC: 0.9106 | Val F1: 0.4751\n",
      "Epoch 00054: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 270 | Loss: 0.1886 | Val AUC: 0.9626 | Val F1: 0.4751\n",
      "Epoch: 275 | Loss: 0.1766 | Val AUC: 0.9207 | Val F1: 0.4751\n",
      "Epoch: 280 | Loss: 0.0352 | Val AUC: 0.9637 | Val F1: 0.8912\n",
      "Epoch: 285 | Loss: 0.0483 | Val AUC: 0.9354 | Val F1: 0.8935\n",
      "Epoch: 290 | Loss: 0.1532 | Val AUC: 0.9552 | Val F1: 0.8906\n",
      "Epoch: 295 | Loss: 0.1133 | Val AUC: 0.9521 | Val F1: 0.9118\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.9594 | Test AP: 0.8653 | Test F1: 0.9017 | G-mean: 0.8469\n",
      "\n",
      "=== Running experiment for Small model ===\n",
      "Total trainable parameters: 1.79M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7574\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.6233\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.5339\n",
      "Pretrain Epoch: 015, TSNE Loss: 0.3745\n",
      "Pretrain Epoch: 020, TSNE Loss: 0.2115\n",
      "Pretrain Epoch: 025, TSNE Loss: 0.0220\n",
      "Pretrain Epoch: 030, TSNE Loss: -0.1517\n",
      "Pretrain Epoch: 035, TSNE Loss: -0.3147\n",
      "Pretrain Epoch: 040, TSNE Loss: -0.4410\n",
      "Pretrain Epoch: 045, TSNE Loss: -0.5976\n",
      "Pretrain Epoch: 050, TSNE Loss: -0.7309\n",
      "Pretrain Epoch: 055, TSNE Loss: -0.8642\n",
      "Pretrain Epoch: 060, TSNE Loss: -0.9600\n",
      "Pretrain Epoch: 065, TSNE Loss: -1.0528\n",
      "Pretrain Epoch: 070, TSNE Loss: -1.1361\n",
      "Pretrain Epoch: 075, TSNE Loss: -1.2025\n",
      "Pretrain Epoch: 080, TSNE Loss: -1.2699\n",
      "Pretrain Epoch: 085, TSNE Loss: -1.3334\n",
      "Pretrain Epoch: 090, TSNE Loss: -1.3830\n",
      "Pretrain Epoch: 095, TSNE Loss: -1.4388\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 547.4008 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 005 | Loss: 3.2490 | Val AUC: 0.5854 | Val F1: 0.4751\n",
      "Epoch: 010 | Loss: 0.4431 | Val AUC: 0.5420 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.3909 | Val AUC: 0.5892 | Val F1: 0.4751\n",
      "Epoch: 020 | Loss: 0.2908 | Val AUC: 0.5806 | Val F1: 0.4751\n",
      "Epoch: 025 | Loss: 0.5618 | Val AUC: 0.5521 | Val F1: 0.4751\n",
      "Epoch: 030 | Loss: 0.4246 | Val AUC: 0.5929 | Val F1: 0.4751\n",
      "Epoch: 035 | Loss: 0.3786 | Val AUC: 0.6413 | Val F1: 0.4751\n",
      "Epoch: 040 | Loss: 0.2744 | Val AUC: 0.5975 | Val F1: 0.4751\n",
      "Epoch: 045 | Loss: 0.2769 | Val AUC: 0.6059 | Val F1: 0.4751\n",
      "Epoch: 050 | Loss: 0.3108 | Val AUC: 0.5437 | Val F1: 0.4751\n",
      "Epoch: 055 | Loss: 0.2517 | Val AUC: 0.5993 | Val F1: 0.4751\n",
      "Epoch: 060 | Loss: 0.2903 | Val AUC: 0.6189 | Val F1: 0.4751\n",
      "Epoch: 065 | Loss: 0.1761 | Val AUC: 0.6321 | Val F1: 0.4751\n",
      "Epoch: 070 | Loss: 0.2742 | Val AUC: 0.6204 | Val F1: 0.4751\n",
      "Epoch: 075 | Loss: 0.3348 | Val AUC: 0.6125 | Val F1: 0.4751\n",
      "Epoch: 080 | Loss: 0.2813 | Val AUC: 0.6619 | Val F1: 0.4751\n",
      "Epoch: 085 | Loss: 0.2692 | Val AUC: 0.6609 | Val F1: 0.4751\n",
      "Epoch: 090 | Loss: 0.3467 | Val AUC: 0.5638 | Val F1: 0.4751\n",
      "Epoch: 095 | Loss: 0.3513 | Val AUC: 0.8438 | Val F1: 0.4751\n",
      "Epoch: 100 | Loss: 0.2543 | Val AUC: 0.6317 | Val F1: 0.4751\n",
      "Epoch: 105 | Loss: 0.3345 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 110 | Loss: 0.3419 | Val AUC: 0.6347 | Val F1: 0.4751\n",
      "Epoch: 115 | Loss: 0.3232 | Val AUC: 0.6073 | Val F1: 0.4751\n",
      "Epoch: 120 | Loss: 0.2911 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 125 | Loss: 0.1912 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 130 | Loss: 0.3448 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 135 | Loss: 0.3127 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 140 | Loss: 0.3449 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 145 | Loss: 0.4464 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 150 | Loss: 0.4106 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00031: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 155 | Loss: 0.2793 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 160 | Loss: 0.3118 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 165 | Loss: 0.2093 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 170 | Loss: 0.2429 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 175 | Loss: 0.3465 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 180 | Loss: 0.2050 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 185 | Loss: 0.2393 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 190 | Loss: 0.3112 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 195 | Loss: 0.3137 | Val AUC: 0.5000 | Val F1: 0.4751\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 200 | Loss: 0.2373 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 205 | Loss: 0.2377 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00042: reducing learning rate of group 0 to 1.2500e-04.\n",
      "Epoch: 210 | Loss: 0.1649 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 215 | Loss: 0.4949 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 220 | Loss: 0.2755 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 225 | Loss: 0.4563 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 230 | Loss: 0.3111 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 235 | Loss: 0.1311 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 240 | Loss: 0.3466 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 245 | Loss: 0.1664 | Val AUC: 0.5779 | Val F1: 0.4751\n",
      "Early stopping at epoch 245\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.8354 | Test AP: 0.6382 | Test F1: 0.4750 | G-mean: 0.0000\n",
      "\n",
      "=== Running experiment for Medium model ===\n",
      "Total trainable parameters: 8.31M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7424\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.4982\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.2425\n",
      "Pretrain Epoch: 015, TSNE Loss: -0.0572\n",
      "Pretrain Epoch: 020, TSNE Loss: -0.3375\n",
      "Pretrain Epoch: 025, TSNE Loss: -0.5661\n",
      "Pretrain Epoch: 030, TSNE Loss: -0.7878\n",
      "Pretrain Epoch: 035, TSNE Loss: -0.9551\n",
      "Pretrain Epoch: 040, TSNE Loss: -1.0925\n",
      "Pretrain Epoch: 045, TSNE Loss: -1.2164\n",
      "Pretrain Epoch: 050, TSNE Loss: -1.3075\n",
      "Pretrain Epoch: 055, TSNE Loss: -1.3903\n",
      "Pretrain Epoch: 060, TSNE Loss: -1.4448\n",
      "Pretrain Epoch: 065, TSNE Loss: -1.5083\n",
      "Pretrain Epoch: 070, TSNE Loss: -1.5558\n",
      "Pretrain Epoch: 075, TSNE Loss: -1.6047\n",
      "Pretrain Epoch: 080, TSNE Loss: -1.6507\n",
      "Pretrain Epoch: 085, TSNE Loss: -1.6984\n",
      "Pretrain Epoch: 090, TSNE Loss: -1.7279\n",
      "Pretrain Epoch: 095, TSNE Loss: -1.7672\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 2210.7623 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 005 | Loss: 1.6462 | Val AUC: 0.6136 | Val F1: 0.4751\n",
      "Epoch: 010 | Loss: 2.5266 | Val AUC: 0.6256 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.3751 | Val AUC: 0.6219 | Val F1: 0.4751\n",
      "Epoch: 020 | Loss: 0.3051 | Val AUC: 0.6380 | Val F1: 0.4751\n",
      "Epoch: 025 | Loss: 0.4124 | Val AUC: 0.6510 | Val F1: 0.4751\n",
      "Epoch: 030 | Loss: 0.4103 | Val AUC: 0.7648 | Val F1: 0.4751\n",
      "Epoch: 035 | Loss: 0.4699 | Val AUC: 0.7674 | Val F1: 0.4751\n",
      "Epoch: 040 | Loss: 0.2560 | Val AUC: 0.8095 | Val F1: 0.4751\n",
      "Epoch: 045 | Loss: 0.2574 | Val AUC: 0.8307 | Val F1: 0.4751\n",
      "Epoch: 050 | Loss: 0.2711 | Val AUC: 0.8901 | Val F1: 0.4751\n",
      "Epoch: 055 | Loss: 0.2443 | Val AUC: 0.8709 | Val F1: 0.4751\n",
      "Epoch: 060 | Loss: 0.2175 | Val AUC: 0.8901 | Val F1: 0.4751\n",
      "Epoch: 065 | Loss: 0.1478 | Val AUC: 0.8869 | Val F1: 0.4751\n",
      "Epoch: 070 | Loss: 0.1921 | Val AUC: 0.8871 | Val F1: 0.4751\n",
      "Epoch: 075 | Loss: 0.3127 | Val AUC: 0.8898 | Val F1: 0.4751\n",
      "Epoch: 080 | Loss: 0.2571 | Val AUC: 0.8893 | Val F1: 0.4751\n",
      "Epoch: 085 | Loss: 0.1982 | Val AUC: 0.8857 | Val F1: 0.4751\n",
      "Epoch: 090 | Loss: 0.3090 | Val AUC: 0.9324 | Val F1: 0.4751\n",
      "Epoch: 095 | Loss: 0.2561 | Val AUC: 0.9004 | Val F1: 0.4751\n",
      "Epoch: 100 | Loss: 0.1991 | Val AUC: 0.9147 | Val F1: 0.4751\n",
      "Epoch: 105 | Loss: 0.2537 | Val AUC: 0.8805 | Val F1: 0.4751\n",
      "Epoch: 110 | Loss: 0.2899 | Val AUC: 0.9059 | Val F1: 0.4751\n",
      "Epoch: 115 | Loss: 0.3096 | Val AUC: 0.9234 | Val F1: 0.4751\n",
      "Epoch: 120 | Loss: 0.3599 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 125 | Loss: 0.2522 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 130 | Loss: 0.3639 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 135 | Loss: 0.3257 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 140 | Loss: 0.3370 | Val AUC: 0.6275 | Val F1: 0.4751\n",
      "Epoch: 145 | Loss: 0.4371 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00030: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 150 | Loss: 0.4068 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 155 | Loss: 0.2885 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 160 | Loss: 0.3162 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 165 | Loss: 0.2213 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 170 | Loss: 0.2505 | Val AUC: 0.5788 | Val F1: 0.4751\n",
      "Epoch: 175 | Loss: 0.3367 | Val AUC: 0.6485 | Val F1: 0.4751\n",
      "Epoch: 180 | Loss: 0.2161 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 185 | Loss: 0.2459 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 190 | Loss: 0.3116 | Val AUC: 0.5019 | Val F1: 0.4751\n",
      "Epoch: 195 | Loss: 0.3113 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 200 | Loss: 0.2418 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00041: reducing learning rate of group 0 to 1.2500e-04.\n",
      "Epoch: 205 | Loss: 0.2484 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 210 | Loss: 0.1725 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 215 | Loss: 0.4857 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 220 | Loss: 0.2763 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 225 | Loss: 0.4499 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 230 | Loss: 0.3112 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 235 | Loss: 0.1381 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 240 | Loss: 0.3461 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Early stopping at epoch 240\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.9199 | Test AP: 0.8190 | Test F1: 0.4750 | G-mean: 0.0000\n",
      "\n",
      "=== Scaling Experiment Results ===\n",
      "Model    Params(M)    Layers   HiddenDim  AUC      F1       G-Mean  \n",
      "Tiny     0.40         2        64         0.9594   0.9017   0.8469  \n",
      "Small    1.79         3        128        0.8354   0.4750   0.0000  \n",
      "Medium   8.31         4        256        0.9199   0.4750   0.0000  \n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAABKUAAAJOCAYAAABm7rQwAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjEsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvc2/+5QAAAAlwSFlzAAAPYQAAD2EBqD+naQAAsHlJREFUeJzs3Xd4VGXax/HfZNIrJZXehAxShCBIAF1cBVFRLCs2EEQFwUXUtbDYYFXWxouigAiIICtYWVFEWXctgEhHgQSkGUpCSIBU0mbO+0eYgZgBE0hyJpnv57rmusyZM2fuk2TMzf08z/1YDMMwBAAAAAAAANQgH7MDAAAAAAAAgPehKAUAAAAAAIAaR1EKAAAAAAAANY6iFAAAAAAAAGocRSkAAAAAAADUOIpSAAAAAAAAqHEUpQAAAAAAAFDjKEoBAAAAAACgxlGUAgAAAAAAQI2jKAWvM2/ePFksFlksFn377bflnjcMQ23atJHFYtGf/vSnKn1vi8WiZ599ttKv27dvnywWi+bNm/eH5+7fv1+jR49W27ZtFRQUpAYNGqhjx4669957tX///soHfY7xOb/P+/btq5b3PF833nijLBaLHnjgAbfPO+Nfv3692+evvfZatWjRotzx7OxsPf/88+rWrZvCw8MVEBCgFi1a6O6779bGjRur8hYq7dtvv3X97lssFlmtVsXExOgvf/mLkpKSTI2tuuXn5+vZZ591+5kHAFQOuVTVqy251LBhw8rkEqc/Pv/8c9d5U6dO1Y033qiWLVtWy+/B+Tj999discjX11dNmjTR8OHDdfDgQbPDq1aHDh3Ss88+q82bN5sdCuDia3YAgFnCwsI0Z86ccn8kv/vuO+3evVthYWHmBHYeDhw4oK5du6pevXp65JFH1K5dO2VlZWn79u364IMPtGfPHjVt2rRGYrnmmmv0448/Ki4urkberzLS09NdidPChQv1yiuvKDAw8Lyvu3v3bvXr10/p6ekaNWqUJk6cqNDQUO3bt08ffPCBEhISdPz4cUVERJz3e52PF154QX379lVRUZHWr1+vSZMm6ZtvvtEvv/yixo0bmxpbdcnPz9fEiRMlyaMSYwCozcilqpen5lJBQUH673//W+54fHy8679nzpypkJAQXX755Vq6dGlNhldh77zzjuLj43XixAl9//33mjx5sr777jv98ssvCgkJMTu8anHo0CFNnDhRLVq00EUXXWR2OIAkilLwYoMHD9bChQv15ptvKjw83HV8zpw56tmzp7Kzs02M7ty8/fbbysjI0Nq1a9WyZUvX8UGDBunvf/+7HA5HjcUSFRWlqKioGnu/ypg/f76Ki4t1zTXX6IsvvtAnn3yi22+//byuabfbdcMNNygjI0M//vijOnTo4Hrusssu01133aUvv/xSfn5+5xv+ebvgggt0ySWXSJIuvfRS1atXTyNGjNC8efM0YcKE87p2fn6+goODqyLMWqG4uNg1ygoA3oZcqnp5ai7l4+PjyiPOZPv27fLxKV2Uc3pO5Ek6dOigbt26SZL69u0ru92uf/zjH1qyZInuuOOO87r2iRMnFBQUVBVh1gonTpxQYGCgLBaL2aGgFmL5HrzWbbfdJkl6//33XceysrL08ccf6+6773b7mqNHj2r06NFq3Lix/P391apVK02YMEGFhYVlzsvOzta9996rhg0bKjQ0VFdddZV27tzp9pq//vqrbr/9dkVHRysgIEA2m01vvvnmOd1TZmamfHx8FB0d7fZ5Z3Lg9NNPP2ngwIFq2LChAgMD1bp1a40bN871/K5duzR8+HBdcMEFCg4OVuPGjTVw4ED98ssvfxiLuynnf/rTn9ShQwetW7dOffr0UXBwsFq1aqV//vOf5ZK8bdu2qV+/fgoODlZUVJTGjBmjL7744oxLBSpj7ty5iomJ0bvvvqugoCDNnTv3vK4nSUuWLNEvv/yi8ePHnzH5GjBgwBkLNkeOHJG/
v7+eeuqpcs8lJyfLYrHo9ddfl1Ra+Pnb3/6mli1bKjAwUA0aNFC3bt3K/C5XhjOx/O233yRJb775pi699FJFR0crJCREHTt21EsvvaTi4uIyr3P+PL///nslJiYqODjY9dlZvHix+vXrp7i4OAUFBclms+mJJ55QXl5emWsMGzZMoaGhSk5OVv/+/RUSEqK4uDj985//lCStWbNGvXv3VkhIiNq2bat33323XPxpaWkaOXKkmjRpIn9/f7Vs2VITJ05USUmJpNIlEc6kfuLEia7p+sOGDXNdoyKfQ+fyxwULFuiRRx5R48aNFRAQoF27dlX5zwQAagNyKe/Npf7I779PFbVlyxZZLBbNmTOn3HNffvmlLBaLPvvsM0mludN9992npk2bKiAgQFFRUerVq5f+85//nNN7/z4fmjhxonr06KEGDRooPDxcXbt21Zw5c2QYRpnXtWjRQtdee60++eQTdenSRYGBga7Z2ZXNqX788UclJiYqKChILVq00DvvvCNJ+uKLL9S1a1cFBwerY8eOWr58ebn4/+hz8O233+riiy+WJA0fPtyVD52+HHb9+vW67rrr1KBBAwUGBqpLly764IMPyryP8/fy66+/1t13362oqCgFBwersLCwyn8m8A4M7cJrhYeH6+abb9bcuXM1cuRISaVJlY+PjwYPHqypU6eWOb+goEB9+/bV7t27NXHiRHXq1Ek//PCDJk+erM2bN+uLL76QVNpHYdCgQVq9erWefvppXXzxxVq1apUGDBhQLobt27crMTFRzZo106uvvqrY2Fh99dVXGjt2rDIyMvTMM89U6p569uypN998UzfeeKMefvhh9ezZs8zI5em++uorDRw4UDabTVOmTFGzZs20b98+ff31165zDh06pIYNG+qf//ynoqKidPToUb377rvq0aOHNm3apHbt2lUqPqm0gHDHHXfokUce0TPPPKNPP/1U48ePV6NGjTR06FBJUmpqqi677DKFhIRoxowZio6O1vvvv++2/9O3336rvn376plnnqlQj4nVq1crKSlJjz76qBo2bKibbrpJCxcu1N69e8uMiFaW8/s2aNCgc3p9VFSUrr32Wr377ruaOHFimWTunXfekb+/v2vU7uGHH9aCBQv03HPPqUuXLsrLy9PWrVuVmZl5Tu+9a9cuVwxS6TLE22+/XS1btpS/v7+2bNmi559/XsnJyeUKeKmpqbrzzjv12GOP6YUXXnDF/euvv+rqq6/WuHHjFBISouTkZL344otau3ZtuSn/xcXFuvHGGzVq1Cg9+uij+te//qXx48crOztbH3/8sR5//HE1adJE06ZN07Bhw9ShQwclJCRIKv196t69u3x8fPT000+rdevW+vHHH/Xcc89p3759eueddxQXF6fly5frqquu0ogRI3TPPfeUud/Kfg7Hjx+vnj17aubMma5/uFT1zwQAagNyKe/MpSS5Bn6cnL0qz1fnzp3VpUsXvfPOOxoxYkSZ5+bNm6fo6GhdffXVkqQhQ4Zo48aNev7559W2bVsdP35cGzdurLJ8aN++fRo5cqSaNWsmqXSg7K9//asOHjyop59+usxrN27cqKSkJD355JNq2bKla/lfZXKqtLQ0DR8+XI899pgr77n77ru1f/9+ffTRR/r73/+uiIgITZo0SYMGDdKePXvUqFEjSRX7HHTt2lXvvPOOhg8frieffFLXXHONJKlJkyaSpP/973+66qqr1KNHD82cOVMRERFatGiRBg8erPz8/DKDeZJ0991365prrtGCBQuUl5cnPz+/Kv+ZwEsYgJd55513DEnGunXrjP/973+GJGPr1q2GYRjGxRdfbAwbNswwDMO48MILjcsuu8z1upkzZxqSjA8++KDM9V588UVDkvH1118bhmEYX375pSHJeO2118qc9/zzzxuSjGeeecZ1rH///kaTJk2MrKysMuc+8MADRmBgoHH06FHDMAxj7969hiTjnXfeOeu9ORwOY+TIkYaP
j48hybBYLIbNZjMeeughY+/evWXObd26tdG6dWvjxIkTZ73m6UpKSoyioiLjggsuMB566CHXcXfxOb/Pp7/vZZddZkgyfvrppzLXbd++vdG/f3/X148++qhhsViMbdu2lTmvf//+hiTjf//7n+vYt99+a1itVmPixIkVuoe7777bkGQkJSUZhmG4fgeeeuqpMued/nvizjXXXGM0b97c9fVVV11lSDIKCgoqFIc7n332WZnfJcMo/Z43atTIuOmmm1zHOnToYAwaNKjS13fe6+LFi43i4mIjPz/f+P777402bdoYVqvV2LJlS7nX2O12o7i42Jg/f75htVpdv5OGcern+c0335z1fR0Oh1FcXGx89913hqQy73PXXXcZkoyPP/7Yday4uNiIiooyJBkbN250Hc/MzDSsVqvx8MMPu46NHDnSCA0NNX777bcy7/nKK68Ykly/Q0eOHCn3+XOq6OfQ+f279NJLy13jXH8mAFAbkUuV8sZcyvl3+/ePXr16nfE1v/89+COvv/66IcnYsWOH69jRo0eNgIAA45FHHnEdCw0NNcaNG1fh6zo5v69r1qwxiouLjZycHOPzzz83oqKijLCwMCMtLa3ca5z50KRJk4yGDRsaDofD9Vzz5s0Nq9VaJl53KpJTrV+/3nXMmfcEBQUZBw8edB3fvHmzIcl4/fXXXccq+jlYt27dGT8H8fHxRpcuXYzi4uIyx6+99lojLi7OsNvtZb5/Q4cOLXeNc/2ZwLuxfA9e7bLLLlPr1q01d+5c/fLLL1q3bt0Zp5v/97//VUhIiG6++eYyx52jBt98842k0lEGSeXWov++Z1FBQYG++eYb3XDDDQoODlZJSYnrcfXVV6ugoEBr1qyp1P1YLBbNnDlTe/bs0fTp0zV8+HAVFxfr//7v/3ThhRfqu+++kyTt3LlTu3fv1ogRI87a4LukpEQvvPCC2rdvL39/f/n6+srf31+//vrrOe/WFhsbq+7du5c51qlTJ9dUaam0QWqHDh3Uvn37Muc5lwmc7rLLLlNJSUm5ESt3cnNz9cEHHygxMdHVjNP5OzBv3rwa7RPhzoABAxQbG+uaqi2VjsIeOnSozO9l9+7d9eWXX+qJJ57Qt99+qxMnTlTqfQYPHiw/Pz8FBwfr0ksvld1u10cffaROnTpJkjZt2qTrrrtODRs2lNVqlZ+fn4YOHSq73V5u6UT9+vV1+eWXl3uPPXv26Pbbb1dsbKzrGpdddpkklfvdsVgsrlFPSfL19VWbNm0UFxenLl26uI43aNBA0dHRZX5XPv/8c/Xt21eNGjUq8xlyjqY7f+fP5Fw+hzfddFO565zvzwQAaityKe/KpaTSRufr1q0r83C33O5c3XHHHQoICCizE+H777+vwsJCDR8+3HWse/fumjdvnp577jmtWbOm3JK4P3LJJZfIz89PYWFhuvbaaxUbG6svv/xSMTExkkp/X6+44gpFRES4cpmnn35amZmZSk9PL3OtTp06qW3btuXeozI5VVxcnGsmuHQq77noootcM6IkyWazSTq1zLAqPge7du1ScnKy6zP3+2ukpqZqx44dZV5zpnzofH4m8E4UpeDVLBaLhg8frvfee08zZ85U27Zt1adPH7fnZmZmKjY2tlwDv+joaPn6+rqmpWZmZsrX11cNGzYsc15sbGy565WUlGjatGny8/Mr83D+Az0jI+Oc7qt58+a6//77NWfOHP36669avHixCgoK9Oijj0oqXYMvnZqueyYPP/ywnnrqKQ0aNEhLly7VTz/9pHXr1qlz587n/I/u339fJCkgIKDM9TIzM10JwencHauMxYsXKzc3V7fccouOHz+u48ePKysrS7fccov279+vFStWuM51Nq622+1ur1VSUlKmablzavfevXvPOT5fX18NGTJEn376qY4fPy6pdKp6XFyc+vfv7zrv9ddf1+OPP64lS5aob9++atCggQYNGqRff/21Qu/z4osvat26ddq4caNSUlK0
Z88e17LDlJQU9enTRwcPHtRrr72mH374QevWrXP1JPj9z93djkC5ubnq06ePfvrpJz333HP69ttvtW7dOn3yySdurxEcHFwuoff391eDBg3KXdvf318FBQWurw8fPqylS5eW+wxdeOGFkv74M3Qun0N393y+PxMAqK3Ipbwrl5JK+0V169atzONcliGeSYMGDXTddddp/vz5rjxs3rx56t69u+vvu1Sa1911112aPXu2evbsqQYNGmjo0KFKS0ur0PvMnz9f69at06ZNm3To0CH9/PPP6tWrlyRp7dq16tevn6TS5verVq3SunXrXBvCVCQfqmxOdaa85/fH/f39JcmVD1XF5+Dw4cOSpL/97W/lrjF69Gi313B3z+f7M4F3oqcUvN6wYcP09NNPa+bMmXr++efPeF7Dhg31008/yTCMMslUenq6SkpKFBkZ6TqvpKREmZmZZZKG3//PuH79+rJarRoyZIjGjBnj9j3Pp8fR6W655RZNnjxZW7dulXRqrfyBAwfO+rr33ntPQ4cO1QsvvFDmeEZGhurVq1clsbnTsGFD1x/H053vHzTnKN64cePKNCE9/Xln8ceZtB08eNDttQ4ePFgmsevfv79mzZqlJUuW6IknnjjnGIcPH66XX37ZtYb/s88+07hx48r0aQgJCdHEiRM1ceJEHT582DVDZ+DAgUpOTv7D92jVqpVrt5nfW7JkifLy8vTJJ5+oefPmruObN292e767XVb++9//6tChQ/r2229ds6MkuQptVSkyMlKdOnU642f39JFFd87lc+juns/3ZwIAtRm51JnVtVyqpgwfPlwffvihVqxYoWbNmmndunWaMWNGmXMiIyM1depUTZ06VSkpKfrss8/0xBNPKD093W0j8N+z2WxnzIcWLVokPz8/ff7552UGzpYsWeL2fHe5QWVzqnNVFZ8D52dv/PjxuvHGG92e8/vCo7t7Pt+fCbwTRSl4vcaNG+vRRx9VcnKy7rrrrjOe9+c//1kffPCBlixZohtuuMF1fP78+a7npdItZV966SUtXLhQY8eOdZ33r3/9q8z1goOD1bdvX23atEmdOnVyjXqcj9TU1DPOXNm/f7/rH+ht27Z1TbV/+OGHFRAQ4PZ6Foul3HNffPGFDh48qDZt2px3vGdy2WWX6ZVXXtH27dvLTDtftGjROV8zKSlJP/74o2666Sa3TT6fe+45/fvf/3YlwJdccolCQ0O1ePHicn+ct2/frm3btpWZ5n799derY8eOmjx5sq699lq3O/B99dVXrp1yzsRms6lHjx565513ZLfby01V/72YmBgNGzZMW7Zs0dSpU5Wfn3/W6/8RZ4Jx+s/dMAy9/fbb53UNSXrrrbfOOa4zufbaa7Vs2TK1bt1a9evXP+N5zljczdKq6s9hVf9MAMDTkUt5Ry5Vk/r166fGjRvrnXfeUbNmzRQYGOh26aFTs2bN9MADD+ibb77RqlWrzvv9LRaLfH19ywwKnjhxQgsWLKjUNaTzy6kqojKfgzPlQ+3atdMFF1ygLVu2lCugnquq/pmg7qIoBUiu7efPZujQoXrzzTd11113ad++ferYsaNWrlypF154QVdffbWuuOIKSaV/RC+99FI99thjysvLU7du3bRq1Sq3f8Ree+019e7dW3369NH999+vFi1aKCcnR7t27dLSpUvL7VL2R55//nmtWrVKgwcP1kUXXaSgoCDt3btXb7zxhjIzM/Xyyy+7zn3zzTc1cOBAXXLJJXrooYfUrFkzpaSk6KuvvtLChQsllf6Df968eYqPj1enTp20YcMGvfzyy384Vf18jRs3TnPnztWAAQM0adIkxcTE6F//+pdrxsnpO9N99913+vOf/6ynn376rL0QnLOkHnvssXJ9GCQpJydH33zzjd577z09+OCDCgsL08SJE/XII4/I4XBo8ODBql+/vn755Re98MILat68eZlE2Wq16tNPP1W/fv3Us2dP3X///erbt69CQkL022+/
6aOPPtLSpUt17NixP7z/u+++WyNHjtShQ4eUmJhYbmSqR48euvbaa9WpUyfVr19fSUlJWrBggXr27HnexY8rr7xS/v7+uu222/TYY4+poKBAM2bMqFDcTomJiapfv75GjRqlZ555Rn5+flq4cKG2bNlyXrG5M2nSJK1YsUKJiYkaO3as2rVrp4KCAu3bt0/Lli3TzJkz1aRJE4WFhal58+b697//rT//+c9q0KCBIiMj1aJFiyr5HFbnzwQAagNyqbqfS1XG+vXrtW/fPklSdna2DMPQRx99JEm6+OKLy8wccsdqtWro0KGaMmWKwsPDdeONNyoiIsL1fFZWlvr27avbb79d8fHxCgsL07p167R8+fIzzvSpjGuuuUZTpkzR7bffrvvuu0+ZmZl65ZVXzlh8dKcqcqqKqujnoHXr1goKCtLChQtls9kUGhqqRo0aqVGjRnrrrbc0YMAA9e/fX8OGDVPjxo119OhRJSUlaePGjfrwww/PGkN1/0xQh5naZh0wwR/tqubkbqeQzMxMY9SoUUZcXJzh6+trNG/e3Bg/fny5HdeOHz9u3H333Ua9evWM4OBg48orrzSSk5Pd7v61d+9e4+677zYaN25s+Pn5GVFRUUZiYqLx3HPPlTlHFdgxZs2aNcaYMWOMzp07Gw0aNDCsVqsRFRVlXHXVVcayZcvKnf/jjz8aAwYMMCIiIoyAgACjdevWZXaCOXbsmDFixAgjOjraCA4ONnr37m388MMPxmWXXVbme1OZHWMuvPDCcnHcddddZXayMwzD2Lp1q3HFFVcYgYGBRoMGDYwRI0YY7777brnd25y7/rjbVc2pqKjIiI6ONi666KIznlNSUmI0adLE6NixY5njH3zwgdG7d28jLCzM8PX1NZo1a2bcf//9bndmMYzSn/0//vEPo2vXrkZoaKjh5+dnNGvWzLjzzjuNVatWnfH9T5eVlWUEBQUZkoy333673PNPPPGE0a1bN6N+/fpGQECA0apVK+Ohhx4yMjIyznpd5/fqww8/POt5S5cuNTp37mwEBgYajRs3Nh599FHXTkin79Zzpp+nYRjG6tWrjZ49exrBwcFGVFSUcc899xgbN24s93ty1113GSEhIeVef6ZrN2/e3LjmmmvKHDty5IgxduxYo2XLloafn5/RoEEDIyEhwZgwYYKRm5vrOu8///mP0aVLFyMgIMCQZNx1112u5yryOTzb9+9cfyYAUBuRS53iLbnU6e/j7u+2u/PkZpe+ivwMnHbu3Ol6zYoVK8o8V1BQYIwaNcro1KmTER4ebgQFBRnt2rUznnnmGSMvL++s163o7+/cuXONdu3auf6uT5482ZgzZ065n4m73MTpfHOqM11bkjFmzJgyxyryOTAMw3j//feN+Ph4w8/Pr9zPfcuWLcYtt9xiREdHG35+fkZsbKxx+eWXGzNnznSdc6bv3/n8TODdLIZhGNVd+AKAqnDffffp/fffV2ZmZpVM0QcAAPAm5FIAPA3L9wB4pEmTJqlRo0Zq1aqVcnNz9fnnn2v27Nl68sknSaIAAAD+ALkUgNqAohQAj+Tn56eXX35ZBw4cUElJiS644AJNmTJFDz74oNmhAQAAeDxyKQC1Acv3AAAAAAAAUON8/vgUAAAAAAAAoGpRlAIAAAAAAECNoygFAAAAAACAGkejczccDocOHTqksLAwWSwWs8MBAAAexDAM5eTkqFGjRvLx8d7xPfIlAABwJhXNlyhKuXHo0CE1bdrU7DAAAIAH279/v5o0aWJ2GKYhXwIAAH/kj/IlilJuhIWFSSr95oWHh5scDQAA8CTZ2dlq2rSpK1/wVuRLAADgTCqaL1GUcsM5BT08PJwkCwAAuOXtS9bIlwAAwB/5o3zJexshAAAAAAAAwDQUpQAAAAAAAFDjKEoBAAAAAACgxlGUAgAAAAAAQI2jKAUAAAAAAIAaR1EKAAAAAAAANY6iFAAAAAAAAGocRSkAAAAAAADUOIpSAAAAAAAA
qHEUpQAAAAAAAFDjKEoBAAAAAACgxlGUAgAAAAAAQI2jKAUAAAAAAIAaR1EKAAAAAAAANc70otT06dPVsmVLBQYGKiEhQT/88MNZz3/zzTdls9kUFBSkdu3aaf78+eXOOX78uMaMGaO4uDgFBgbKZrNp2bJl1XULAAAAAAAAqCRfM9988eLFGjdunKZPn65evXrprbfe0oABA7R9+3Y1a9as3PkzZszQ+PHj9fbbb+viiy/W2rVrde+996p+/foaOHCgJKmoqEhXXnmloqOj9dFHH6lJkybav3+/wsLCavr23LI7DK3de1TpOQWKDgtU95YNZPWxmB0WAACAxyBfAgDAO1gMwzDMevMePXqoa9eumjFjhuuYzWbToEGDNHny5HLnJyYmqlevXnr55Zddx8aNG6f169dr5cqVkqSZM2fq5ZdfVnJysvz8/M4pruzsbEVERCgrK0vh4eHndA13lm9N1cSl25WaVeA6FhcRqGcGttdVHeKq7H0AAED1qa48obYhXwIAAGdS0TzBtOV7RUVF2rBhg/r161fmeL9+/bR69Wq3ryksLFRgYGCZY0FBQVq7dq2Ki4slSZ999pl69uypMWPGKCYmRh06dNALL7wgu91+xlgKCwuVnZ1d5lHVlm9N1f3vbSyTYElSWlaB7n9vo5ZvTa3y9wQAAKhNyJcAAPAuphWlMjIyZLfbFRMTU+Z4TEyM0tLS3L6mf//+mj17tjZs2CDDMLR+/XrNnTtXxcXFysjIkCTt2bNHH330kex2u5YtW6Ynn3xSr776qp5//vkzxjJ58mRFRES4Hk2bNq26G1XpFPSJS7fL3ZQ057GJS7fL7jBt0hoAAKjlqqNPZ00iXwIAwPuY3ujcYinbH8AwjHLHnJ566ikNGDBAl1xyifz8/HT99ddr2LBhkiSr1SpJcjgcio6O1qxZs5SQkKBbb71VEyZMKLNE8PfGjx+vrKws12P//v1Vc3Mnrd17tNyI3+kMSalZBVq792iVvi8AAPAOzj6dEyZM0KZNm9SnTx8NGDBAKSkpbs939ul89tlntW3bNk2cOFFjxozR0qVLazjyU8iXAADwPqYVpSIjI2W1WsvNikpPTy83e8opKChIc+fOVX5+vvbt26eUlBS1aNFCYWFhioyMlCTFxcWpbdu2riKVVNqnKi0tTUVFRW6vGxAQoPDw8DKPqpSec+YE61zOAwAAON2UKVM0YsQI3XPPPbLZbJo6daqaNm16xkG5BQsWaOTIkRo8eLBatWqlW2+9VSNGjNCLL75Yw5GfQr4EAID3Ma0o5e/vr4SEBK1YsaLM8RUrVigxMfGsr/Xz81OTJk1ktVq1aNEiXXvttfLxKb2VXr16adeuXXI4HK7zd+7cqbi4OPn7+1f9jVRAdFjgH59UifMAAACcqqtPp7vXVGcPTvIlAAC8j6nL9x5++GHNnj1bc+fOVVJSkh566CGlpKRo1KhRkkqX1Q0dOtR1/s6dO/Xee+/p119/1dq1a3Xrrbdq69ateuGFF1zn3H///crMzNSDDz6onTt36osvvtALL7ygMWPG1Pj9OXVv2UBxEYE600bGFpXuKtO9ZYOaDAsAANQB1dWn8/equwcn+RIAAN7H1KLU4MGDNXXqVE2aNEkXXXSRvv/+ey1btkzNmzeXJKWmppbphWC32/Xqq6+qc+fOuvLKK1VQUKDVq1erRYsWrnOaNm2qr7/+WuvWrVOnTp00duxYPfjgg3riiSdq+vZcrD4WPTOwvSSVS7ScXz8zsL2sPmdKwwAAAM6uqvt0/l519+AkXwIAwPtYDMNgC5Pfyc7OVkREhLKysqq0v9TyramauHR7mSaecRGBemZge13VIa7K3gcAAFSf6soTzlVRUZGCg4P14Ycf6oYbbnAdf/DBB7V582Z99913Z3xtcXGxDh8+rLi4OM2aNUuPP/64jh8/7mqLcDbkSwAA4EwqmieYvvueN7mqQ5xWPn65/u+WzpIkPx+Lvv3bn0iwAADA
OauuPp1mceZLb93Z1XXsq3GXki8BAFAHUZSqYVYfi66/qLGC/Kwqdhjaf+yE2SEBAIBarjr6dJrJ6mNR/w5xig4LkCTtOpJrckQAAKA6+JodgDfy8bGoXWyYNu8/rqTUbLWJDjU7JAAAUIsNHjxYmZmZmjRpklJTU9WhQ4cK9encsWOH/Pz81Ldv33J9Oj1BfFy40nOOKCk1W12b1Tc7HAAAUMUoSpnEFheuzfuPKzktWwM7NzI7HAAAUMuNHj1ao0ePdvvcvHnzynxts9m0adOmGojq/NjiwvT9ziNKTs0xOxQAAFANWL5nEltcmCSRZAEAAJyBLba0MWpyWrbJkQAAgOpAUcok8SeTrKRUkiwAAAB34k8bxGPDaAAA6h6KUiZxJlmHsgqUlV9scjQAAACep3VUqPysFuUUlugAm8MAAFDnUJQySXignxrXC5LElHQAAAB3/Kw+ahN9crZUGi0PAACoayhKmcjZV4olfAAAAO7ZYsmXAACoqyhKmSje1byTkT8AAAB3XH2lmFkOAECdQ1HKRLa4k83OKUoBAAC45cyX2LEYAIC6h6KUiZwjfzvSsmV3sKMMAADA7zlnlu/NzFN+UYnJ0QAAgKpEUcpELRqGKNDPRwXFDv2WmWd2OAAAAB4nKixAkaH+Mgxp5+Fcs8MBAABViKKUiaw+FrWLYUcZAACAszm1hI++UgAA1CUUpUzmnJLOjjIAAADuxbMDHwAAdRJFKZM5+0ol0bwTAADALdcgHjPLAQCoUyhKmcw1HZ1tjgEAANw6ffmeYbA5DAAAdQVFKZM5p6MfOHZC2QXFJkcDAADgeVpHh8jXx6LsghKlZhWYHQ4AAKgiFKVMVi/YX3ERgZKkHUxJBwAAKCfA16rWUaGS6CsFAEBdQlHKA7CjDAAAwNnZ4tixGACAuoailAdw7ShDkgUAAOBWfBw7FgMAUNdQlPIAJFkAAABn5xrEI18CAKDOoCjlAdqfnI6+Iy1HDgc7ygAAAPxe+5ODeHsz8lRQbDc5GgAAUBUoSnmAFg1D5O/ro/wiu/Yfyzc7HAAAAI8TFRagBiH+chjSr4dzzQ4HAABUAYpSHsDX6qO2MewoAwAAcCYWi4UlfAAA1DEUpTxEfKyzrxTNzgEAANxx5UtpFKUAAKgLKEp5CNvJPgnJJFkAAABu2U724UxmEA8AgDqBopSHsLmmo5NkAQAAuOMcxEtKy5ZhsDkMAAC1HUUpDxF/MslKOZqv3MISk6MBAADwPG2iQ2X1seh4frEOZxeaHQ4AADhPFKU8RIMQf8WEB0iSdqQxWwoAAOD3Av2sahUZIom+UgAA1AUUpTyIs3knfaUAAADcc84up68UAAC1H0UpDxIfxzbHAAAAZxMfS74EAEBdQVHKg7Rn5A8AAOCs2rNjMQAAdQZFKQ9yavleDjvKAAAAuOGcWb77SJ4KS+wmRwMAAM4HRSkP0ioqRP5WH+UWlujAsRNmhwMAAOBxYsMDFRHkJ7vD0K+Hc80OBwAAnAeKUh7Ez+qjNtGhkuiTAAAA4I7FYpHt5GypZHYsBgCgVqMo5WHiSbIAAADOytXygEE8AABqNYpSHsZ2MsliphQAAIB7zplSSTQ7BwCgVqMo5WGYKQUAAHB28a5BPDaHAQCgNqMo5WFsJ7c53peZp/yiEpOjAQAA8DxtY8LkY5GO5hXpSG6h2eEAAIBzRFHKw0SGBigyNECGIe1gthQAAEA5Qf5WtYgMkVQ6WwoAANROFKU8EDvKAAAAnJ2NZucAANR6FKU8kHMJH0kWAACAewziAQBQ+1GU8kDxsSd3lGE6OgAAgFvx7FgMAECtR1HKA7mSrLRsdpQBAABww7lj8a70XBWVOEyOBgAAnAuKUh6oTXSofH0syiko0aGsArPDAQAA8DiN6wUpLNBXJQ5Du4/kmh0OAAA4BxSlPJC/r4/aRIdKoq8UAACAOxaL5VSz8zTyJQAAaiOKUh7qVF8pkiwAAAB3nEv46MMJAEDtRFHK
Qzl34EtiRxkAAAC3XPkSg3gAANRKFKU8VPzJJIvlewAAAO45Z5YnM4gHAECtRFHKQ9lOJll7M/JUUGw3ORoAAADP0y42TBaLdCSnUBm5hWaHAwAAKomilIeKCgtQwxB/OQxp52FG/wAAAH4v2N9XLRqGSJKS6SsFAECtQ1HKQ1ksFlfzTpIsAAAA904t4aPlAQAAtQ1FKQ8Wf3Kb4+30lQIAAHCLfAkAgNqLopQHY+QPAADg7JhZDgBA7UVRyoM5tzlOTsuRYRgmRwMAAOB52p/Ml3al56rY7jA5GgAAUBkUpTxYm+hQWX0sOp5frLTsArPDAQAA8DiN6wUpNMBXRXaH9hzJMzscAABQCRSlPFign1WtItlRBgAA4Ex8fCxqR8sDAABqJYpSHs65hC+JJAsAAMAt28m+UkkM4gEAUKtQlPJwNO8EAAA4O+cOfMyUAgCgdqEo5eFsJ5OsJLY5BgAAcOvUTCnyJQAAahOKUh7OuXxvT0aeCortJkcDAADgedqdHMQ7nF2oo3lFJkcDAAAqiqKUh4sJD1C9YD/ZHYZ2peeaHQ4AAIDHCQ3wVbMGwZJYwgcAQG1ielFq+vTpatmypQIDA5WQkKAffvjhrOe/+eabstlsCgoKUrt27TR//vwznrto0SJZLBYNGjSoiqOuORaLRfGxTEkHAAA4m1P5En04AQCoLUwtSi1evFjjxo3ThAkTtGnTJvXp00cDBgxQSkqK2/NnzJih8ePH69lnn9W2bds0ceJEjRkzRkuXLi137m+//aa//e1v6tOnT3XfRrVzLuFLTiPJAgAAcMeVLzGIBwBArWFqUWrKlCkaMWKE7rnnHtlsNk2dOlVNmzbVjBkz3J6/YMECjRw5UoMHD1arVq106623asSIEXrxxRfLnGe323XHHXdo4sSJatWqVU3cSrWysaMMAADAWTmbnTOIBwBA7WFaUaqoqEgbNmxQv379yhzv16+fVq9e7fY1hYWFCgwMLHMsKChIa9euVXFxsevYpEmTFBUVpREjRlQolsLCQmVnZ5d5eJL4uFPT0Q3DMDkaAAAAzxN/chBvx+EcldgdJkcDAAAqwrSiVEZGhux2u2JiYsocj4mJUVpamtvX9O/fX7Nnz9aGDRtkGIbWr1+vuXPnqri4WBkZGZKkVatWac6cOXr77bcrHMvkyZMVERHhejRt2vTcb6watI0Jk49FOppXpCM5hWaHAwAA4HGaNQhWsL9VRSUO7cvMMzscAABQAaY3OrdYLGW+Ngyj3DGnp556SgMGDNAll1wiPz8/XX/99Ro2bJgkyWq1KicnR3feeafefvttRUZGVjiG8ePHKysry/XYv3//Od9PdQj0s6plZIgkKYkp6QAAAOX4+FjUjmbnAADUKqYVpSIjI2W1WsvNikpPTy83e8opKChIc+fOVX5+vvbt26eUlBS1aNFCYWFhioyM1O7du7Vv3z4NHDhQvr6+8vX11fz58/XZZ5/J19dXu3fvdnvdgIAAhYeHl3l4mviTzTvZgQ8AAMA95xI+8iUAAGoH04pS/v7+SkhI0IoVK8ocX7FihRITE8/6Wj8/PzVp0kRWq1WLFi3StddeKx8fH8XHx+uXX37R5s2bXY/rrrtOffv21ebNmz1uWV5l2E6O/LGjDAAAgHs0OwcAoHbxNfPNH374YQ0ZMkTdunVTz549NWvWLKWkpGjUqFGSSpfVHTx4UPPnz5ck7dy5U2vXrlWPHj107NgxTZkyRVu3btW7774rSQoMDFSHDh3KvEe9evUkqdzx2sa1zTFJFgAAgFuufIlBPAAAagVTi1KDBw9WZmamJk2apNTUVHXo0EHLli1T8+bNJUmpqalKSUlxnW+32/Xqq69qx44d8vPzU9++fbV69Wq1aNHCpDuoOc7le7vSc1VYYleAr9XkiAAAADyLs6fUoawCHc8vUr1gf5MjAgAAZ2MxDMMwOwhPk52drYiICGVlZXlMfynDMNR54tfKLijRsrF91L6RZ8QF
AIC38cQ8wQye+n3o9c//6uDxE1p03yW6pFVDs8MBAMArVTRPMH33PVSMxWJxzZZKTmNKOgAAgDss4QMAoPagKFWLuJqd01cKAADALZqdAwBQe1CUqkWcM6XY5hgAAMC9+FjyJQAAaguKUrWIzVWUYuQPAADAHedMqR2Hc2R30DoVAABPRlGqFmkbEyqLRcrILdSRnEKzwwEAAPA4zRuGKNDPRwXFDv2WmWd2OAAA4CwoStUiwf6+atEwRBLNzgEAANyx+ljULqZ0thSzywEA8GwUpWoZV/NOkiwAAAC3bOxYDABArUBRqpZxNe8kyQIAAHArPpaZUgAA1AYUpWoZkiwAAICzY8diAABqB4pStYxzOvqu9BwV2x0mRwMAAOB5bCdnlh88fkLZBcUmRwMAAM6EolQt06R+kEIDfFVsN7TnCDvKAAAA/F5EsJ8aRQRKknakMbscAABPRVGqlrFYLKct4WNKOgAAgDss4QMAwPNRlKqF4k/uwEezcwAAAPfowwkAgOejKFULubY5JskCAABwy5UvMYgHAIDHoihVC8XHMh0dAADgbGwnZ5bvSMuRw2GYHA0AAHCHolQt1O7kdPT0nEJl5haaHA0AAIDnadEwRP6+PsovsivlaL7Z4QAAADcoStVCoQG+at4wWBI7ygAAALjja/VRu5jSgTyW8AEA4JkoStVSruadFKUAAADcotk5AACejaJULUVfKQAAgLOLjyNfAgDAk1GUqqXYUQYAAODsnM3Ok5lZDgCAR6IoVUs5k6ydh3NVYneYHA0AAIDncc4sTzmar9zCEpOjAQAAv0dRqpZqWj9YIf5WFZU4tDcjz+xwAAAAPE6DEH/FhAdIknYwuxwAAI9DUaqW8vGxqB3NzgEAAM7K5uorRb4EAICnoShVizmbdybTvBMAAMAt5xI++nACAOB5KErVYjbXNsckWQAAAO44+3AyUwoAAM9DUaoWc82UYvkeAACAW86ZUjvScuRwGCZHAwAATkdRqhZz9pRKzSrQ8fwik6MBAADwPK2iQuRv9VFuYYkOHj9hdjgAAOA0FKVqsfBAPzWpHySJKekAAHi76dOnq2XLlgoMDFRCQoJ++OGHs56/cOFCde7cWcHBwYqLi9Pw4cOVmZlZQ9HWHD+rj9pEh0qSttPyAAAAj0JRqpajeScAAFi8eLHGjRunCRMmaNOmTerTp48GDBiglJQUt+evXLlSQ4cO1YgRI7Rt2zZ9+OGHWrdune65554ajrxmxJ/sK5XMIB4AAB6FolQt154kCwAArzdlyhSNGDFC99xzj2w2m6ZOnaqmTZtqxowZbs9fs2aNWrRoobFjx6ply5bq3bu3Ro4cqfXr19dw5DWjfRyDeAAAeCKKUrVcPEkWAABeraioSBs2bFC/fv3KHO/Xr59Wr17t9jWJiYk6cOCAli1bJsMwdPjwYX300Ue65pprzvg+hYWFys7OLvOoLU7NLGcQDwAAT0JRqpaLP9nsfMfhHNnZUQYAAK+TkZEhu92umJiYMsdjYmKUlpbm9jWJiYlauHChBg8eLH9/f8XGxqpevXqaNm3aGd9n8uTJioiIcD2aNm1apfdRnZzL9/Zl5im/qMTkaAAAgBNFqVquecMQBflZVVDs0L7MPLPDAQAAJrFYLGW+Ngyj3DGn7du3a+zYsXr66ae1YcMGLV++XHv37tWoUaPOeP3x48crKyvL9di/f3+Vxl+dIkMDFBUWIMOQdjBbCgAAj+FrdgA4P1Yfi9rGhmnL/uNKTs1R66hQs0MCAAA1KDIyUlartdysqPT09HKzp5wmT56sXr166dFHH5UkderUSSEhIerTp4+ee+45xcXFlXtNQECAAgICqv4Gakh8bJiO5BQqOS1HXZrVNzscAAAgZkrVCbaTS/iS2OYYAACv4+/vr4SEBK1YsaLM8RUrVigxMdHta/Lz8+XjUzYNtFqtkkpnWNVFtpN9OMmXAADwHBSl6gAbzc4BAPBqDz/8sGbPnq25c+cq
KSlJDz30kFJSUlzL8caPH6+hQ4e6zh84cKA++eQTzZgxQ3v27NGqVas0duxYde/eXY0aNTLrNqqVjR2LAQDwOCzfqwPiXTOlSLIAAPBGgwcPVmZmpiZNmqTU1FR16NBBy5YtU/PmzSVJqampSklJcZ0/bNgw5eTk6I033tAjjzyievXq6fLLL9eLL75o1i1UO+cOfElp2WfttwUAAGqOxairc7TPQ3Z2tiIiIpSVlaXw8HCzw/lDWfnF6jzpa0nSlmf6KSLIz+SIAACou2pbnlBdatv3oajEoQufWa5iu6GVj/dVk/rBZocEAECdVdE8geV7dUBEsJ8a1wuSxI4yAAAA7vj7+rg2hGEJHwAAnoGiVB3hXMJHXykAAAD36MMJAIBnoShVR8THsQMfAADA2dCHEwAAz0JRqo5wNe8kyQIAAHArPu5Us3MAAGA+ilJ1hHM6+o60HDkc9K4HAAD4PdvJmeX7MvJ0oshucjQAAICiVB3RomGwAnx9dKLYrt+O5psdDgAAgMeJCg1QwxB/OQzp13RmlwMAYDaKUnWEr9VHbWNONjunrxQAAEA5FouFPpwAAHgQilJ1iHNKelIaI38AAADu2OjDCQCAx6AoVYc4m50zUwoAAMA9Z7PzZJqdAwBgOopSdYhrOjpJFgAAgFvxsc7lezkyDDaHAQDATBSl6hDndPT9R08op6DY5GgAAAA8zwUxobL6WJR1olhp2QVmhwMAgFejKFWH1A/xV2x4oCRp52H6JAAAAPxegK9VraNCJEnJ9JUCAMBUFKXqGOcSvu0kWQAAAG45+3Bupw8nAACmoihVx9jiaHYOAABwNq58iR2LAQAwFUWpOsbZvJMkCwAAwD3nzHIG8QAAMBdFqTrm9JlSDgc7ygAAAPyec3OYPRl5Kii2mxwNAADei6JUHdMyMkT+Vh/lFdl14NgJs8MBAADwODHhAaoX7Ce7w9Cu9FyzwwEAwGtRlKpj/Kw+uiAmVJKUlMaUdAAAgN+zWCyu2VJJLOEDAMA0FKXqoHiSLAAAgLNy9pVKYsdiAABMQ1GqDrK5mneSZAEAALjjnCmVzMxyAABMQ1GqDjq1zTFJFgAAgDvOfCkpNVuGweYwAACYgaJUHRQfWzpT6rej+corLDE5GgAAAM9zQUyofCzSsfxiHckpNDscAAC8EkWpOqhhaICiwgJkGNKOwyzhAwAA+L1AP6taRoZIkrbThxMAAFNQlKqjXEv46CsFAADg1qmWB+RLAACYgaJUHWU7uYSPvlIAAADunRrEI18CAMAMphelpk+frpYtWyowMFAJCQn64Ycfznr+m2++KZvNpqCgILVr107z588v8/zbb7+tPn36qH79+qpfv76uuOIKrV27tjpvwSOd2uaYJAsA4D3sDkM/7s7Uvzcf1I+7M2V30MAaZ+bsw5nEzHIAAEzha+abL168WOPGjdP06dPVq1cvvfXWWxowYIC2b9+uZs2alTt/xowZGj9+vN5++21dfPHFWrt2re69917Vr19fAwcOlCR9++23uu2225SYmKjAwEC99NJL6tevn7Zt26bGjRvX9C2a5vTle4ZhyGKxmBwRAADVa/nWVE1cul2pWQWuY3ERgXpmYHtd1SHOxMjgqZz50u4juSossSvA12pyRAAAeBeLYeIeuD169FDXrl01Y8YM1zGbzaZBgwZp8uTJ5c5PTExUr1699PLLL7uOjRs3TuvXr9fKlSvdvofdblf9+vX1xhtvaOjQoRWKKzs7WxEREcrKylJ4eHgl78ozFJU4dOEzy1VsN7Ty8b5qUj/Y7JAAAKg2y7em6v73Nur3SY1zSGbGnV2rrDBVF/KEqlAXvg+GYajzxK+VXVCiZWP7qH2j2nkfAAB4mormCaYt3ysqKtKGDRvUr1+/Msf79eun1atXu31NYWGhAgMDyxwLCgrS2rVrVVxc7PY1+fn5Ki4uVoMGDaom8FrC39dHraNCJTElHQBQt9kdhiYu3V6uICXJdWzi0u0s
5UM5FotF8SdnS9HyAACAmmdaUSojI0N2u10xMTFljsfExCgtLc3ta/r376/Zs2drw4YNMgxD69ev19y5c1VcXKyMjAy3r3niiSfUuHFjXXHFFWeMpbCwUNnZ2WUedUF7mncCALzA2r1HyyzZ+z1DUmpWgdbuPVpzQaHWcOVLbA4DAECNM73R+e97HZ2t/9FTTz2lAQMG6JJLLpGfn5+uv/56DRs2TJJktZbvAfDSSy/p/fff1yeffFJuhtXpJk+erIiICNejadOm535DHsTZ7JxtjgEAdVl6zpkLUudyHrxLfCz5EgAAZjGtKBUZGSmr1VpuVlR6enq52VNOQUFBmjt3rvLz87Vv3z6lpKSoRYsWCgsLU2RkZJlzX3nlFb3wwgv6+uuv1alTp7PGMn78eGVlZbke+/fvP7+b8xDxsUxHBwDUfdFhZx54Opfz4F1YvgcAgHlMK0r5+/srISFBK1asKHN8xYoVSkxMPOtr/fz81KRJE1mtVi1atEjXXnutfHxO3crLL7+sf/zjH1q+fLm6dev2h7EEBAQoPDy8zKMucM6U2puZpxNFdpOjAQCgehSV2HW2PWYtKt2Fr3tL7+oviYppGxMqi0XKyC3SkZxCs8MBAMCr+Jr55g8//LCGDBmibt26qWfPnpo1a5ZSUlI0atQoSaUzmA4ePKj58+dLknbu3Km1a9eqR48eOnbsmKZMmaKtW7fq3XffdV3zpZde0lNPPaV//etfatGihWsmVmhoqEJDQ2v+Jk0UHRaoyFB/ZeQWaefhHHVuWs/skAAAqDKGYWjW93v04vJkt03OpVO77z0zsL2sPmcrXcFbBfv7qmXDEO3JyFNyWraiwqLMDgkAAK9hak+pwYMHa+rUqZo0aZIuuugiff/991q2bJmaN28uSUpNTVVKSorrfLvdrldffVWdO3fWlVdeqYKCAq1evVotWrRwnTN9+nQVFRXp5ptvVlxcnOvxyiuv1PTteQSW8AEA6qL8ohKNXbRZk79MlsOQ/pLQRK/fepHiIsou0YuNCNSMO7vqqg5xJkWK2sDVh5MdiwEAqFGmzpSSpNGjR2v06NFun5s3b16Zr202mzZt2nTW6+3bt6+KIqsb4mPDtHJXBs07AQB1xv6j+bpvwQYlpWbL18eipwe215BLmstiseiaTo20du9RpecUKDqsdMkeM6TwR+Jjw7XslzQG8QAAqGGmF6VQvWw07wQA1CErf83QA+9v1PH8YkWG+uvN27uqR6uGruetPhb1bN3wLFcAynPlSwziAQBQoyhK1XGu6ehpOTIMQxYLo8UAgNrHMAzN/mGvJn+ZJIchdWoSoZl3JqhRvSCzQ0MdEB9bmi/tSs9Rsd0hP6upHS4AAPAa/MWt49pEh8rXx6KsE8VKzSowOxwAACrtRJFd4xZv1vPLSgtSN3Vtog9G9qQghSrTpH6QwgJ8VWw3tPtIrtnhAADgNShK1XEBvla1jirddTA5jSV8AIDaZf/RfN00Y7X+vfmQrD4WPTuwvV75SycF+lnNDg11iMViodk5AAAmoCjlBZxJVhJJFgCgFlm9K0PXvbFS21Oz1SDEX++N6KFhvVqyFB3VwrVjMYN4AADUGIpSXsCVZNHsHABQC5T2j9qjIXPX6lh+sTo0DtfSv/amgTmqFYN4AADUPBqdewHbac3OAQDwZAXFdj3x8c9asvmQJOnGLo31wo0dWa6HaufcgS+ZQTwAAGoMRSkv4Eyy9hzJVUGxncQeAOCRDhzL18gFG7TtULasPhZNuNqm4b1asFwPNaJdTOkgXnpOoTJzC9UwNMDkiAAAqPtYvucFosMCVD/YTw5D+vUwO8oAADzP6t0Zuu6NVdp2qLR/1IIR3XV3b/pHoeaEBPiqecNgScwuBwCgplCU8gIWi8U1W4rmnQAAT2IYhuau3Kshc9bqaF6RLmwUrs8e6KXE1pFmhwYvZKMPJwAANYqilJdwNjtnm2MAgKcoKLbrkQ+3aNLn22V3GBp0USN9
NCpRTeoHmx0avFQ8fTgBAKhR9JTyEqd2lGHkDwBgvoPHT2jUgg365WCWfCzS36+2aQTL9WAydiwGAKBmUZTyEs7p6Mlp2TIMg6QfAGCaNXsyNWbhRmXmFal+sJ/euL2rerVhuR7M59yx+NfDuSqxO+RrZVEBAADVib+0XuKCmFD5WKRj+cVKzyk0OxwAgBcyDEPzVu3VnbN/UmZekWxx4frsgd4UpOAxmtYPVoi/VUV2h/Zm5JkdDgAAdR5FKS8R6GdVq6hQSUxJBwDUvIJiu/724c96dul2lTgMXde5kT65P1FNG9A/Cp7Dx8eidrEnWx7QVwoAgGpHUcqLxDuTLJqdAwBqUGrWCQ1+60d9vPGAfCzShKtteu3WixTkbzU7NKCc+Dj6SgEAUFMoSnkRW9ypvlIAANSEtXuPauC0ldpyIEv1gv307t3dde+lrehtCI/lypcoSgEAUO1odO5FnM07k5kpBQCoZoZhaMGa3zTp5HK9+NgwzRrSTc0aslwPns12cmZ5Msv3AACodhSlvIhzm+PdR3JVWGJXgC/LJgAAVa+g2K6n/71VH6w/IEm6tlOcXrq5k4L9STvg+Zw9pVKzCnQ8v0j1gv1NjggAgLqL5XteJC4iUBFBfipxGNqVnmt2OACAOigtq0CDZ63RB+tL+0c9MSBe027rQkEKtUZYoJ+aNgiSRB9OAACqG0UpL2KxWFzNzlnCBwCoauv2HdW101Zqy/7jigjy07zh3TXqstb0j0Kt45xdTh9OAACqF0UpL2NjRxkAQBUzDEPvrflNt81ao4zcQsXHhumzB3rp0rZRZocGnBOba8di8iUAAKoTc+m9jKvZOc07AQBVoLDErmf+vU2L1u2XJF3TsbR/VEgAKQZqr1M7FpMvAQBQncgYvQzT0QEAVeVwdoFGvbdBm1KOy2KRHusfr1GXtWK5Hmq9+JNFqR1pObI7DFl9+J0GAKA6UJTyMm1jwuRjkTJyi5SeU6DosECzQwIA1EIbfjuqUe9t1JGcQoUH+ur127roT+2izQ4LqBLNGgQryM+qE8V27c3IU5voULNDAgCgTqKnlJcJ8reqRWSIJJqdAwDOzb9+StGts9boSE6h2saE6rMHelOQQp1i9bGonXNzGGaXAwBQbShKeSEbS/gAAOegsMSu8Z/8or9/+ouK7YYGdIjVp6N7uQY7gLrE1YeTQTwAAKoNRSkvFO/aUYYkCwBQMenZBbpt1hq9vzZFFov0aP92mn5HVxqao86iDycAANWPTNILOZt3ss0xAKAiNvx2TPe/t0HpOYUKC/TV67d2Ud94luuhbmMQDwCA6kdRygs5p6PvPpKrohKH/H2ZMAcAcG/R2hQ99e+tKrYbuiA6VLOGdlNLluvBCzgH8Q4eP6GsE8WKCPIzOSIAAOoeqhFeqHG9IIUF+KrYbmhPRq7Z4QAAPFBRiUMTPv1FT3xS2j+q/4Ux+nRMLwpS8BoRQX5qXC9IkrQjjdlSAABUB4pSXshisSg+zjklnSV8AICy0nMKdPvba7Twp9L+UY9c2VYz7khQKP2j4GVOLeEjXwIAoDpQlPJStpNT0tlRBgBwuk0pxzRw2kqt/+2YwgJ8Neeubvrrny+Qj4/F7NCAGufKl2h2DgBAtWDI00s5d5RJYjo6AOCkD9bt15NLtqrI7lDrqBDNGtpNraNCzQ4LMM2pmeXkSwAAVAeKUl6K5XsAAKeiEof+8fl2LVjzmyTpyvYxmnJLZ4UF0tgZ3s05iLcjLUd2hyErMwYBAKhSFKW8VLuYMFks0pGcQmXkFioyNMDskAAAJjiSU6jRCzdo3b5jkqSHrmirv17ehuV6gKSWkSEK8PXRiWK7Uo7m0+gfAIAqRk8pLxUS4KvmDYIlsaMMAHirLfuPa+C0lVq375hCA3z19tBuevAK+kcBTlYfi9qdbHaezOxyAACqHEUpL+bqK0WSBQBe58P1+/WXt35UWnaBWkWFaMmYXrqyfYzZYQEehx34AACoPhSl
vJhzRxmadwKA9yi2O/TMv7fq0Y9+VlGJQ1fYorVkTC+1iaahOeCOK19iZjkAAFWOnlJezNnsnG2OAcA7ZOQWavTCjVq796gk6cE/X6AH/8xyPeBsnDPLyZcAAKh6FKW8mO1kkvXr4VwV2x3yszJxDgDqqp8PHNeoBRt0KKtAoQG+evWWzup/YazZYQEez7l8b//RE8opKGZXSgAAqhBVCC/WpH6QQvytKrI7tDcjz+xwAADV5OMNB3TzzB91KKtArSJDtGRMIgUpoILqh/grNjxQEpvDAABQ1ShKeTEfH4vi42h2DgB1VbHdoYlLt+mRD7eoqMShy+OjteSBXmoTHWZ2aECtYjvZ8oC+UgAAVC2KUl7u1I4yJFkAUJdk5hZqyJyf9M6qfZKksZe30eyh3RTO0iOg0pyDeMkM4gEAUKXoKeXlXEkWzTsBoM7YejBLIxds0MHjJxTib9Wrt1ykqzqwXA84V6cG8ciXAACoShSlvFx75w58zJQCgDrh000H9MTHv6iwxKEWDYP19tBuuiCG5XrA+Wh/chBvR1qOHA6DHSsBAKgiFKW8XNuT/1BJyy7Qsbwi1Q/xNzkiAMC5KLE79MKyZM1dtVeS1LddlKbe2kURQSzXA85Xy8gQ+Vt9lFdk14FjJ9SsYbDZIQEAUCfQU8rLhQX6qWmDIElSEkv4AKBWOppXpKFz17oKUg/0baPZd11MQQqoIr5WH10QEypJ2s4SPgAAqkyFi1LHjh3TtGnTlJ1d/g9xVlbWGZ+D57PFOpt3soQPAGqbrQezNHDaSq3enalgf6tm3NFVf+vfTlaWFwFVykYfTgAAqlyFi1JvvPGGvv/+e4WHh5d7LiIiQj/88IOmTZtWpcGhZtDsHABqp39vPqibZ67WweMn1LxhsD4d3UsDOsaZHRZQJzmbnTOIBwBA1alwUerjjz/WqFGjzvj8yJEj9dFHH1VJUKhZNteOMiRZAFAblNgdev6L7Xpw0WYVFDt0WdsofTamt9rF0tAcqC7OmVK0OwAAoOpUuNH57t27dcEFF5zx+QsuuEC7d++ukqBQs5xJ1s7DOSqxO+RrpdUYAHiqY3lFeuD9jVq1K1OSNPpPrfVIP5brAdXNOVPqt8x85RWWKCSA/YIAADhfFa4+WK1WHTp06IzPHzp0SD4+FDNqo2YNghXkZ1VhiUP7MvPNDgcAcAbbD2Vr4BsrtWpXpoL8rHrz9q567Kp4ClJADWgYGqDosABJ0o7DzC4HAKAqVLiK1KVLFy1ZsuSMz3/66afq0qVLVcSEGubjY3Et+UhiRxkA8EifbTmkG2esKt2OvkGwPh2TqGs60T8KqEnOPpzkSwAAVI0KF6UeeOABvfrqq3rjjTdkt9tdx+12u6ZNm6b/+7//05gxY6olSFQ/dpQBAM9UYndo8rIkjX1/kwqKHepzQaQ+e6CX4mPLbzwCoHrZ4mh2DgBAVarwYvibbrpJjz32mMaOHasJEyaoVatWslgs2r17t3Jzc/Xoo4/q5ptvrs5YUY1IsgDA8xzPL9Jf39+kH37NkCSNuqy1Hu1P/6i67tixY1q6dKmGDh1qdij4HVssg3gAAFSlSnVofP7553X99ddr4cKF2rVrlwzD0KWXXqrbb79d3bt3r64YUQOcI+5MRwcAz5CUmq37FqzX/qMnFORn1Us3d9LAzo3MDgs1ICUlRcOHD6co5YHiTxvEMwxDFgsFYgAAzkeltw3p3r07Bag6yNlT6lBWgbLyixUR7GdyRADgvT7/+ZAe/fBnnSi2q2mDIL11Zze1b8RyvboiO/vsA0A5Ocxa9lStIkPlZ7Uop7BEB46dUNMGwWaHBABArVbhotT333/v9nhERITatGmjkJCQKgsKNS8iyE+N6wXp4PETSk7LVo9WDc0OCQC8jt1h6OWvdmjmd7slSX0uiNTrt3ZR/RB/kyNDVapXr95ZZ9gwA8dz+fv6qE10mJJSs5WclkNRCgCA81ThotSf/vSnMz5ntVp1//33
69VXX5WfHzNsaitbXNjJolQORSkAqGHH84s0dtFmfb/ziCTpvktb6bH+7eRrrfCeJKglwsLCNGHCBPXo0cPt87/++qtGjhxZw1GhomyxJ4tSqdm6sn2M2eEAAFCrVbgodezYMbfHjx8/rrVr1+rRRx9VbGys/v73v1dZcKhZ8bHh+k9SOn2lAKCGJadl6775G5RyNF+Bfj568aZOuv6ixmaHhWrStWtXSdJll13m9vl69erJMIxKX3f69Ol6+eWXlZqaqgsvvFBTp05Vnz593J47bNgwvfvuu+WOt2/fXtu2bav0e3uT+LgwaZOURLNzAADOW4WLUhEREWc83rx5c/n7++vvf/87RalazBZ3stl5Gr0sAKCmLPslVX/7cIvyi+xqUj9Ibw1J0IWN3P/NRd1w++2368SJE2d8PjY2Vs8880ylrrl48WKNGzdO06dPV69evfTWW29pwIAB2r59u5o1a1bu/Ndee03//Oc/XV+XlJSoc+fO+stf/lKp9/VGznyJHYsBADh/FuNchuLc2Ldvnzp06KDc3NyquJypsrOzFRERoaysLIWHe09j2d1HcvXnV79TkJ9VWyf2Z8txAKhGdoehV7/eoenflvaPSmzdUG/c3lUN6B/l8TwxT+jRo4e6du2qGTNmuI7ZbDYNGjRIkydP/sPXL1myRDfeeKP27t2r5s2bV+g9PfH7UBOO5BTq4uf/I4tF2j7xKgX5W80OCQAAj1PRPKHKGlUcOnRI0dHRVXU5mKBFwxAF+vnoRLFdv2XmmR0OANRZWfnFGvHuOldB6p7eLTX/7u4UpHBOioqKtGHDBvXr16/M8X79+mn16tUVusacOXN0xRVXVLgg5c2iwgIUGeovw5B2HGa2FAAA56NKilLp6el68skndfnll1f6tdOnT1fLli0VGBiohIQE/fDDD2c9/80335TNZlNQUJDatWun+fPnlzvn448/Vvv27RUQEKD27dvr008/rXRc3sjqY1G7mDBJUjJL+ACgWuw8nKPr3lypb3ccUYCvj1679SI9eW17Gpp7kWbNmikzM9P19RtvvKHs7HPvT5SRkSG73a6YmLJNt2NiYpSWlvaHr09NTdWXX36pe+6556znFRYWKjs7u8zDW51awue93wMAAKpChTPgLl26qGvXruUerVu3VtOmTXX06FG98MILlXpzZ/+DCRMmaNOmTerTp48GDBiglJQUt+fPmDFD48eP17PPPqtt27Zp4sSJGjNmjJYuXeo658cff9TgwYM1ZMgQbdmyRUOGDNEtt9yin376qVKxeav4WJIsAKguy7ematCbq/RbZr4a1wvSx/cn0tDcCx04cEB2u9319d///ndlZGSc93UtlrLL7g3DKHfMnXnz5qlevXoaNGjQWc+bPHmyIiIiXI+mTZueT7i1Wnwsg3gAAFSFCjc6P1OiEh4ervj4ePXr109Wa+XW1E+ZMkUjRoxwjcxNnTpVX331lWbMmOG2/8GCBQs0cuRIDR48WJLUqlUrrVmzRi+++KIGDhzousaVV16p8ePHS5LGjx+v7777TlOnTtX7779fqfi8UXxcaZK1neadAFBl7A5D/7dip9743y5JUs9WDfXG7V3UMDTA5MjgCc63vWdkZKSsVmu5WVHp6enlZk+5e++5c+dqyJAh8vc/+/LR8ePH6+GHH3Z9nZ2d7bWFKecg3nYG8QAAOC8VLkpVZBeYkpIS+fpW7JLO/gdPPPFEmeNn639QWFiowMDAMseCgoK0du1aFRcXy8/PTz/++KMeeuihMuf0799fU6dOPWMshYWFKiwsdH3NdPTS7ckBAOcv60Sxxi3apP/tOCJJurtXS/396niW66HK+Pv7KyEhQStWrNANN9zgOr5ixQpdf/31Z33td999p127dmnEiBF/+D4BAQEKCKCQKpVdvlfRGWkAAKC8Chelzmb79u2aPXu2Fi5cqMOHD1foNefS/6B///6aPXu2Bg0apK5du2rDhg2aO3euiouLlZGR
obi4OKWlpVW6p8LkyZM1ceLECsVd1zmnox84dkLZBcUKD/QzOSIAqL1+PZyj+xZs0N6MPAX4+mjyjR11Y9cmZocFDzB79myFhoZKKh3UmzdvniIjI8ucM3bs2Apf7+GHH9aQIUPUrVs39ezZU7NmzVJKSopGjRolqXSW08GDB8v14pwzZ4569OihDh06nOcdeZfW0SHy9bEou6BEqVkFalQvyOyQAAColc65KJWbm6tFixZpzpw5WrdunS655JJys54qojL9D5566imlpaXpkksukWEYiomJ0bBhw/TSSy+VWTpY2Z4KTEc/pV6wv+IiApWaVaAdaTm6uEUDs0MCgFrpq21penjxZuUV2dUoIlBvDemmjk0izA4LHqBZs2Z6++23XV/HxsZqwYIFZc6xWCyVKkoNHjxYmZmZmjRpklJTU9WhQwctW7bMtZteampquZ6dWVlZ+vjjj/Xaa6+dx914pwBfq1pHhWrH4RwlpWZTlAIA4BxVuii1cuVKzZ49Wx9//LFatmyp7du367vvvlOvXr0qdZ1z6X8QFBSkuXPn6q233tLhw4cVFxenWbNmKSwszDW6GBsbW+meCkxHL8sWF67UrAIlp2ZTlAKASnI4DE39z069/t/S/lE9WjbQm3d0VST9o3DSvn37quW6o0eP1ujRo90+N2/evHLHIiIilJ+fXy2xeANbXJh2HM5RclqO/mw7e+8uAADgXoUbWrz00kuKj4/XrbfeqqioKK1cuVI///yzLBaL6tevX+k3Pr3/welWrFihxMTEs77Wz89PTZo0kdVq1aJFi3TttdfKx6f0Vnr27Fnuml9//fUfXhOnOJfwJbGjDABUSnZBse6dv95VkBqW2ELv3dODghRQB8Wf7CuVRLNzAADOWYVnSv3973/X448/rkmTJlV6l70zqWz/g507d2rt2rXq0aOHjh07pilTpmjr1q169913Xdd88MEHdemll+rFF1/U9ddfr3//+9/6z3/+o5UrV1ZJzN6AJAsAKm9Xeq7uW7Bee47kyd/XRy/c0FE3J9A/CqirnIN4yQziAQBwzipclJo0aZLmzZunBQsW6LbbbtOQIUPOuylmZfsf2O12vfrqq9qxY4f8/PzUt29frV69Wi1atHCdk5iYqEWLFunJJ5/UU089pdatW2vx4sXq0aPHecXqTWwnk6wdaTlyOAz5+LCjDACczYrth/XQ4s3KLSxRXESg3hqSoE5N6pkdFoBq5NyBb8+RXBUU2xXoVzWDtgAAeBOLYRhGZV7w3Xffae7cufr444/VunVrbdu27Zx6Snmy7OxsRUREKCsrS+Hh4WaHU+NK7A61f+YrFZU49N2jf1LzhiFmhwQAHsnhMPTaN7/qtW9+lSR1b1HaPyoqjOV6dZm35wlO3v59MAxDCc/9R0fzirT0gd5sZAAAwGkqmidUuKeU02WXXaZ3331Xqampuv/++5WQkKDLLrtMiYmJmjJlynkFDc/ga/VR25jSbaqTUpmSDgDu5BQU674FG1wFqbt6NtfCe3tQkAK8hMViOa0PJy0PAAA4F5UuSjmFhYVp1KhR+umnn7Rp0yZ1795d//znP6syNpgoPpa+UgBwJruP5GrQm6v0n6TD8rf66KWbO2ni9R3kZz3nP6vwQlarVenp6eWOZ2ZmVln/TlQv8iUAAM5PlWTPHTt21NSpU3Xw4MGquBw8gLNPQjIjfwBQxn+2H9agN1Zp95E8xYYH6oNRPXVLt6Zmh4Va6EwdFAoLC+Xv71/D0eBc2OJONjtnZjkAAOekwo3OK8LPz68qLwcT2dhRBgDKcDgMTfvvLv3ff3ZKki5uUV/T70hguR4q7fXXX5dUuvxr9uzZCg0NdT1nt9v1/fffKz4+3qzwUAmnD+IZhiGLhc1hAACojCotSqHuiD+ZZP2Wma/cwhKFBvCrAsB75RQU65EPtujr7YclSUMuaa6nrm0vf1+W66Hy/u///k9S6UypmTNnllmq5+/vrxYtWmjmzJlm
hYdKaBMdKquPRcfyi3U4u1CxEYFmhwQAQK1CpQFuNQjxV0x4gA5nF2pHWo4Smtc3OyQAMMWeI7m6b8EG7UrPlb/VR/8YdKEGX9zM7LBQi+3du1eS1LdvX33yySeqX5+/sbVVoJ9VrSJD9Gt6rpLSsilKAQBQSQzx4oyczTvpKwXAW/03+bCuf3OVdqXnKiY8QItHXkJBClXmf//7X5mClN1u1+bNm3Xs2DETo0JlOWeX01cKAIDKq3RRip1ivEf8yead7CgDwNs4HIamffOrRry7XjkFJUpoXl9L/9pbXZoxowVVZ9y4cZozZ46k0oLUpZdeqq5du6pp06b69ttvzQ0OFRYfS74EAMC5qnRRip1ivEd7Rv4AeKHcwhKNXrhRr67YKcOQ7ujRTO/fe4miw1iWg6r14YcfqnPnzpKkpUuXat++fUpOTta4ceM0YcIEk6NDRbVnx2IAAM5ZhXtKsVOM9zm1fC+HHWUAeIW9GXm6b/56/ZqeKz+rRZOu76DburNcD9UjMzNTsbGxkqRly5bpL3/5i9q2basRI0a48i54PufM8t1H8lRYYleALysHAACoqAoXpdgpxvu0igqRn9Wi3MISHTh2Qk0bBJsdEgBUm//tSNfY9zcpp6BE0WEBmnFnAps8oFrFxMRo+/btiouL0/LlyzV9+nRJUn5+Pi0RapHY8EBFBPkp60SxdqXn6sJGEWaHBABArVHhohQ7xXgfP6uP2kSHKSk1W0mp2RSlANRJhmFo+re79crXO2QYUtdm9TTjzgTFhLNcD9Vr+PDhuuWWWxQXFyeLxaIrr7xSkvTTTz8x+7wWsVgsio8N0097jyopNYeiFAAAlVDhopTT//73vzJf2+12/fLLL2revDmFqjrIFldalEpOy1G/C2PNDgcAqlReYYn+9uEWfbk1TZJ0W/dmeva69iy/QY149tln1aFDB+3fv19/+ctfFBAQIKl0U5knnnjC5OhQGba4cP2096iSaXYOAEClVLooNW7cOHXs2FEjRoxw7RTz448/Kjg4WJ9//rn+9Kc/VUOYMIstNlzSQZp3AqhzfsvM033zN2jH4Rz5WS169roLdUeP5maHBS9z8803S5IKCgpcx+666y6zwsE5sp3sK5WcxuYwAABURqV332OnGO/ibN6ZxA58AOqQb3eka+C0ldpxOEdRYQF6/95LKEihxtntdv3jH/9Q48aNFRoaqj179kiSnnrqKc2ZM8fk6FAZzs1hklKzz7hTNQAAKK/SRamz7RTzyy+/VHmAMJft5DbH+zLzlF9UYnI0AHB+SvtH7dLweeuUXVCii5rW0+d/7a1uLRqYHRq80PPPP6958+bppZdekr+/v+t4x44dNXv2bBMjQ2W1jQmTj0XKzCvSkdxCs8MBAKDWqHRRyrlTjN1u1/Lly3XFFVdIYqeYuioyNECRoQEyDGnn4VyzwwGAc5ZfVKIH3t+kl5aXNjQf3K2pFo+8hIbmMM38+fM1a9Ys3XHHHWVyqE6dOik5OdnEyFBZQf5WtYgMkSQlM7scAIAKq3RRyrlTTIcOHdgpxkvYXEv46CsFoHZKyczXjdNX64ufU+XrY9E/BnXQP2/qSENzmOrgwYNq06ZNueMOh0PFxcUmRITzYTttCR8AAKiYSjc6Z6cY72OLC9cPv2awowyAWun7nUf01/c3KetEsSJDAzTjzq66mOV68AAXXnihfvjhBzVvXraf2YcffqguXbqYFBXOlS0uTF/8kkqzcwAAKqHSRSmJnWK8TXzsyZlSJFkAahHDMDTr+z16cXmyHIbUuWk9zbyzq+IigswODV7u7rvv1muvvaZnnnlGQ4YM0cGDB+VwOPTJJ59ox44dmj9/vj7//HOzw0QlxTNTCgCASqv08j12ivE+7CgDoLbJLyrR2EWbNfnL0oLUXxKaaPF9l1CQgkd49913deLECQ0cOFCLFy/WsmXLZLFY9PTTTyspKUlLly51tUdA7eHcsXj3kVwVlThMjgYAgNqh
0kUpdorxPm2iQ+XrY1FOQYkOZRX88QsAwET7j5b2j1q65ZB8fSyadP2FeunmTgr0o38UPMPpAzz9+/fXd999p9zcXOXn52vlypXq16+fidHhXDWuF6SwQF8V2w3tPsLmMAAAVESli1LsFON9/H191CY6VJLoKwXAo638NUMD31ip5LQcRYb6a+E9PTS0ZwtZLBazQwPK4Hey7rFYLK5m58lp5EsAAFREpXtKsVOMd4qPDVNyWo6SUrP1Z1uM2eEAQBmGYWj2D3s1+cskOQypU5MIzbwzQY3qsVwPnqlt27Z/WJg6evRoDUWDqhIfF6a1+44qKTVHN9CrHgCAP1TpohQ7xXgnW1y4lmw+RLNzAB7nRJFdj3/8sz7bckiSdFPXJnr+hg4s14NHmzhxoiIiIswOA1XMFkezcwAAKqPCRSl2ivFu8SeTLJbvAfAk+4/ma+SCDdqemi2rj0VPXWPTXYks14Pnu/XWWxUdHW12GKhizh2LkxnEAwCgQircU4qdYryb7WSStTcjTwXFdpOjAQBp9a4MXffGSm1PzVbDkNL+UcN6taQgBY/H72jd1TYmTBaLdCSnUBm5hWaHAwCAx6vwTKnf7xTTv3//agkInikqLEANQvx1NK9IOw/nqFOTemaHBMBLGYahOSv36oVlpf2jOjaO0MwhCWpM/yjUEqfnVKhbQgJ81bxBsPZl5is5NUe9LwgwOyQAADxapXbfY2TPe1ksFtniTk5JT2VKOgBzFBTb9dDizXrui9KC1I1dGuvDUT0pSKFWcTgcLN2rw5x9pdiBDwCAP1apRufsFOPd4mPDtWpXppJIsgCY4MCx0v5R2w6V9o+acLVNw3vRPwqAZ4mPDdeXW9OUxCAeAAB/qFJFKXaK8W7O5p3sKAOgpq3enaEH/rVJR/OK1CDEX2/c3kWJrSPNDgsAyomPI18CAKCiKlWUYqcY73ZqOnqODMNgdgKAamcYht5ZtU/PL0uS3WHowkbhemtIgprUDzY7NABwq/3JfGlXeq6K7Q75WSvVLQMAAK9S4b+SFCDQJjpUVh+LjucX63A2O8oAqF4FxXY98uEWTfp8u+wOQzd0aayP70+kIAXAozWuF6TQAF8V2R3am5FndjgAAHi0Chel2CkGgX5WtYoMkcSUdADV6+DxE/rLzB/1ycaDsvpY9OQ1Nk25pbMC/axmhwYAZ+XjY1E7Wh4AAFAhFS5KsVMMpFNL+Gh2DqC6rNmTqeumrdQvB7NUP9hPC+7urnv6tGLGLoBaw+bqK0WzcwAAzoZF7qgUZ/POZJIsAFXMMAzNW7VXd87+SZl5RWofF67PHuitxDY0NAdQu8THOvtwMogHAMDZVKrROWA7mWQxHR1AVSootmvCp1v18cYDkqTrOjfSizd1UpA/y/UA1D42duADAKBCKEqhUpzL9/Zk5Kmg2E5/FwDnLTXrhEYt2KAtB7LkY5HGD7Dpnj4tWa4HoNZqd3IQ73B2oY7mFalBiL/JEQEA4JlYvodKiQkPUL1gP9kdhnal55odDoBabu3eoxo4baW2HMhSvWA/zb+7h+69lP5RAGq30ABfNWtQulMoS/gAADgzilKoFIvFonh2lAFwngzD0Pwf9+n2t9coI7dI8bFhWvpAb/W+gP5RAOoGZ75EH04AAM6MohQqzbmELzmNJAtA5RUU2/X4xz/r6X9vU4nD0LWd4vTJ6EQ1PTmrAADqAteOxQziAQBwRvSUQqXZ2FEGwDlKyyrQyPc2aMv+4/KxSI9fFa/7WK4HoA5yNjtnEA8AgDOjKIVKi3ftKJMjwzD4xySAClm376juf2+jMnILFRHkp2m3ddGlbaPMDgsAqkX8yUG8nYdzVGJ3yNfKAgUAAH6Pv46otLYxYfKxSEfzinQkp9DscAB4OMMw9N6a33TbrDXKyC1UfGyYPnugFwUpAHVaswbBCva3qrDEoX2ZeWaHAwCAR6IohUoL9LOqZWSIJCmJKekAzqKwxK7xn/yiJ5dsVYnD0DUd
S/tHNW8YYnZoAFCtfHwsahd7anY5AAAoj6IUzkm8s9k5zTsBnMHh7ALdOmuNFq3bL8vJ/lFv3N5Fwf6sHAfgHeLpwwkAwFlRlMI5sblG/kiyAJS34bejunbaSm1KOa7wQF+9M+xi3f+n1vSgA+BVbHHMlAIA4GwYrsY5cW5zzI4yAH7vXz+l6JnPtqrYbqhtTKhmDemmFpEs1wPgfWzMLAcA4KwoSuGcOJfv7UrPVVGJQ/6+TLoDvF1hiV3PfrZd769NkSQN6BCrV/7SWSEB/KkB4J2cPaUOZRUoK79YEcF+JkcEAIBnoZKAc9IoIlDhgb4qcRjalZ5rdjgATJaeXaDbZq3R+2tTZLFIj/Zvp+l3dKUgBcCrhQf6qXG9IElSEn2lAAAoh6IUzonFYjnV7JwkC/BqG347pmunrdTGlOMKC/TV3GEXa0zfNvSPAgCxhA8AgLOhKIVz5mx2Tl8pwHstWpuiW2f9qPScQl0QHarPHuitvu2izQ4LADyGs9k5+RIAAOWxrgLnzDlTih34AO9TVOLQxKXbtPCn0v5R/S+M0au3XKRQlusBQBnxseRLAACcCf96wDmzuYpSjPwB3iQ9p0Cj39uo9b8dk8UiPXJlW43+Uxv5+LBcDwB+zzlTasfhHNkdhqz8vxIAABeKUjhnbWNCZbFIGbmFOpJTqKiwALNDAlDNNqUc06j3NuhwdqHCAnz12m0X6fL4GLPDAgCP1bxhiAL9fFRQ7NBvmXlqFRVqdkgAAHgMekrhnAX7+6pFwxBJNDsHvMEH6/Zr8FtrdDi7UG2iQ/XvB3pRkAKAP2D1sahdDH2lAABwh6IUzku8s9k5S/iAOquoxKGnlmzVYx//rCK7Q/3ax+jT0YmM9gNABdFXCgAA9yhK4by4+koxUwqok47kFOqO2Wu0YM1vkqSHrmirmXcmKCzQz+TIAKD2cPaVog8nAABl0VMK54WZUkDdtXn/cY1asEFp2QUKC/DV/w2+SFe0Z7keAFSWc8di2h0AAFAWRSmcF+dMqV3puSq2O+RnZfIdUBd8sH6/nlyyVUUlDrWKCtHbQ7upNcv1AOCc2E4u3ztw7ISyC4oVzmxTAAAksXwP56lJ/SCFBviqyO7QniN5ZocD4DwV2x165t9b9dhHP6uoxKErbDFaMqYXBSkAOA8RwX5qFBEoSdpBs3MAAFwoSuG8WCyWU0v4mJIO1GoZuYW6Y/ZPevfH0v5R4664QLOGJDCiDwBVwLWEj2bnAAC4UJTCeYs/2bxzO0kWUGv9fOC4Bk5bqbV7jyo0wFezhiRo3BVt5eNjMTs0AKgTnIN42+nDCQCAi+lFqenTp6tly5YKDAxUQkKCfvjhh7Oev3DhQnXu3FnBwcGKi4vT8OHDlZmZWeacqVOnql27dgoKClLTpk310EMPqaCgoDpvw6vZXCN/JFlAbfTxhgO6eeaPSs0qUKvIEC0Zk6h+F8aaHRYA1Ck2mp0DAFCOqUWpxYsXa9y4cZowYYI2bdqkPn36aMCAAUpJSXF7/sqVKzV06FCNGDFC27Zt04cffqh169bpnnvucZ2zcOFCPfHEE3rmmWeUlJSkOXPmaPHixRo/fnxN3ZbXiY8lyQJqo2K7Q89+tk2PfLhFRSUO/Tk+Wkse6KU20WFmhwYAdY7t5MzyHWk5cjgMk6MBAMAzmFqUmjJlikaMGKF77rlHNptNU6dOVdOmTTVjxgy3569Zs0YtWrTQ2LFj1bJlS/Xu3VsjR47U+vXrXef8+OOP6tWrl26//Xa1aNFC/fr102233VbmHFStdienox/OLtTRvCKTowFQEZm5hbpz9k+at3qfJGns5W309tBu9I8CgGrSomGI/H19lF9kV8rRfLPDAQDAI5hWlCoqKtKGDRvUr1+/Msf79eun1atXu31NYmKiDhw4oGXLlskwDB0+fFgfffSRrrnmGtc5vXv31oYNG7R27VpJ0p49e7Rs2bIy5/xeYWGhsrOzyzxQcaEBvmreMFgSzTuB
2mDrwSxd98Yq/bT3qEL8rZp5Z4Ie7teO/lEAUI18rT5qF8PmMAAAnM60olRGRobsdrtiYmLKHI+JiVFaWprb1yQmJmrhwoUaPHiw/P39FRsbq3r16mnatGmuc2699Vb94x//UO/eveXn56fWrVurb9++euKJJ84Yy+TJkxUREeF6NG3atGpu0os4m3cmsc0x4NE+3XRAN81YrYPHT6hlZIiWjOmlqzrQPwoAaoIrX6IPJwAAkjyg0bnFUnZk3jCMcsectm/frrFjx+rpp5/Whg0btHz5cu3du1ejRo1ynfPtt9/q+eef1/Tp07Vx40Z98skn+vzzz/WPf/zjjDGMHz9eWVlZrsf+/fur5ua8iLOvVBIzpQCPVGJ3aNLS7Xpo8RYVljjUt12UlozppQti6B8FADUlnmbnAACU4WvWG0dGRspqtZabFZWenl5u9pTT5MmT1atXLz366KOSpE6dOikkJER9+vTRc889p7i4OD311FMaMmSIq/l5x44dlZeXp/vuu08TJkyQj0/5OlxAQIACAgKq+A69CzvKAJ7raF6RHvjXRq3eXbpT6QN92+ihK9vKynI9AKhRzmbnzJQCAKCUaTOl/P39lZCQoBUrVpQ5vmLFCiUmJrp9TX5+frmiktVqlVQ6w+ps5xiG4ToHVc+ZZO08nKsSu8PkaAA4bT2YpYHTVmr17kwF+1s1886u+lv/dhSkAMAEzpnlKUfzlVtYYnI0AACYz9Tlew8//LBmz56tuXPnKikpSQ899JBSUlJcy/HGjx+voUOHus4fOHCgPvnkE82YMUN79uzRqlWrNHbsWHXv3l2NGjVynTNjxgwtWrRIe/fu1YoVK/TUU0/puuuucxWwUPWa1g9WsL9VRSUO7c3IMzscAJL+vfmgbp5Z2j+qecNgfTq6l67qEGd2WADgtRqE+CsmvHR2/g76cAIAYN7yPUkaPHiwMjMzNWnSJKWmpqpDhw5atmyZmjdvLklKTU1VSkqK6/xhw4YpJydHb7zxhh555BHVq1dPl19+uV588UXXOU8++aQsFouefPJJHTx4UFFRURo4cKCef/75Gr8/b+LjY1G72DBtSjmupLQc+tQAJiqxO/Ti8mS9/cNeSdJlbaP0+q1dFBHsZ3JkAID42HAdzj6ipNRsJTSvb3Y4AACYymKwpq2c7OxsRUREKCsrS+Hh4WaHU2v8/dNf9K+fUjT6T6312FXxZocDeKVjeUV64P2NWrWrtH/U6D+11iP9WK4HVCXyhFJ8H87NP79M1szvduvOS5rpuUEdzQ4HAIBqUdE8wdSZUqhbbCe3OU5mOjpgiu2HsnXfgvU6cOyEgvyseuUvnXVNJ5brAYAncfbhTKbZOQAAFKVQdZzbHCelsgMfUNM+23JIj320RQXFDjVrEKxZQxNcDXUBAJ7D+f/m5LQcORyGfJjJCgDwYhSlUGXanZwplZpVoOP5RaoX7G9yREDdV2J36OWvduit7/dIkvpcEKlpt3Xh8wcAHqpVVIj8rT7KLSzRweMn1LRBsNkhAQBgGlN330PdEh7opyb1gySxhA+oCcfzizR83jpXQWrUZa01b3h3ClIA4MH8rD5qEx0qidnlAABQlEKVck5JJ8kCqldSarYGvrFSP/yaoSA/q6bd1kVPDIinoTkA1ALxJ/tKJdFXCgDg5ShKoUq1p3knUO0+//mQbpy+WvuPnlDTBkH6ZHSiBnZuZHZYAIAKah/n7CvFIB4AwLvRUwpVKp4kC6g2doehl7/aoZnf7ZZU2j/q9Vu7qH4Iy/UAoDY5vdk5AADejKIUqlT8yWbnOw7nyO4wWEoEVJHj+UUau2izvt95RJI08tJWerR/O/lamfAKALWNc/nevsw85ReVKNiflBwA4J341wyqVPOGIQrys6qg2KF9mXlmhwPUCclp2brujVX6fucRBfr56PXbumj81TYKUgBQS0WGBigqLECGIe1gthQAwIvxLxpUKauPRW1j6SsFVJVlv6TqxumrlXI0
X03qB+nj+xN1Hf2jAKDWc84uZwkfAMCbUZRClbPFOneUoa8UcK7sDkMvLU/W6IUblV9kV682DfXZA711YaMIs0MDAFQBm7MPJ/kSAMCLsYAdVc5Gs3PgvGTlF+vBxZv07Y7S/lH39mmpx6+KZ7keANQhtjjnIB4zpQAA3ouiFKpcfCxJFnCudh7O0b3z1+u3zHwF+PropZs76fqLGpsdFgCgijl34EtKy5ZhGLJY2BwGAOB9KEqhyjmTrIPHTyi7oFjhgX4mRwTUDsu3purhD7Yov8iuxvWC9NaQBHVozHI9AKiLWkeFytfHopyCEh3KKlDjekFmhwQAQI1jLQiqXESwnxpFBEqi2TlQEXaHoVe+2qFR75X2j+rZqqGW/rU3BSkAqMP8fX3UJjpUkpR0iJYHAADvRFEK1YK+UkDFZJ0o1j3vrtMb/9slSRrRu6UWjOiuBiH+JkcGAKhu5EsAAG/H8j1Ui/i4MH2TnE5fKeAsfj2co/sWbNDejDwF+Pronzd11A1dmpgdFgCghrj6cKaRLwEAvBNFKVQLV/NOtjkG3Fq+NU2PfLBZeUV2NYoI1Kyh3ViuBwBeJj6OfAkA4N0oSqFaOKej70jLkcNhyMeHHWUASXI4DE39z069/t/S5XqXtGqgN2/vqoahASZHBgCoaba40plS+zLydKLIriB/q8kRAQBQs+gphWrRomGwAnx9dKLYrpSj+WaHA3iE7IJi3Tt/vasgNbxXCy0Y0YOCFIAqMX36dLVs2VKBgYFKSEjQDz/8cNbzCwsLNWHCBDVv3lwBAQFq3bq15s6dW0PRQpKiQgPUMMRfDkP6NZ0lfAAA78NMKVQLX6uP2saE6ZeDWUpKzVaLyBCzQwJMtSs9R/fN36A9GXny9/XR5Bs66qYE+kcBqBqLFy/WuHHjNH36dPXq1UtvvfWWBgwYoO3bt6tZs2ZuX3PLLbfo8OHDmjNnjtq0aaP09HSVlJTUcOTezWKxKD4uTKt2ZSopNVudmtQzOyQAAGoURSlUG1vcyaJUWo4GdIwzOxzANCu2H9ZDizcrt7BEcRGBemtIAv/wAFClpkyZohEjRuiee+6RJE2dOlVfffWVZsyYocmTJ5c7f/ny5fruu++0Z88eNWjQQJLUokWLmgwZJ9liw08WpZgpBQDwPizfQ7VxNjtPpnknvJTDYej/VuzUvfPXK7ewRN1bNtDSv/amIAWgShUVFWnDhg3q169fmeP9+vXT6tWr3b7ms88+U7du3fTSSy+pcePGatu2rf72t7/pxIkTNREyTuNsdp6cRr4EAPA+zJRCtYmPc25zTJIF75NTUKyHFm/Rf5IOS5Lu6tlcT17bXn5WxgIAVK2MjAzZ7XbFxMSUOR4TE6O0tDS3r9mzZ49WrlypwMBAffrpp8rIyNDo0aN19OjRM/aVKiwsVGFhoevr7Gz+vleF+NjSfCk5LUeGYchiYXMYAID34F9HqDa2kzOl9h89oZyCYpOjAWrO7iO5GvTmKv0n6bD8fX308s2dNPH6DhSkAFSr3xczzlbgcDgcslgsWrhwobp3766rr75aU6ZM0bx58844W2ry5MmKiIhwPZo2bVrl9+CNLogJldXHouP5xUrLLjA7HAAAahT/QkK1qR/ir9jwQEnSzsP0SYB3+M/2wxr0xirtPpKn2PBAfTiyp/7SjX+4Aag+kZGRslqt5WZFpaenl5s95RQXF6fGjRsrIiLCdcxms8kwDB04cMDta8aPH6+srCzXY//+/VV3E14swNeq1lGlG8Ik01cKAOBlKEqhWjmX8G0nyUId53AYeu0/v+qe+euVU1iii1vU19K/9lbnpvXMDg1AHefv76+EhAStWLGizPEVK1YoMTHR7Wt69eqlQ4cOKTc313Vs586d8vHxUZMm7ncGDQgIUHh4eJkHqoazDyctDwAA3oaiFKoVzc7hDXIKijXqvQ36v//slCQNuaS5Ft5ziaLCAkyODIC3ePjhhzV79mzNnTtXSUlJeuih
h5SSkqJRo0ZJKp3lNHToUNf5t99+uxo2bKjhw4dr+/bt+v777/Xoo4/q7rvvVlBQkFm34bVcfTgZxAMAeBkanaNa2eJONe8E6qI9R3J134IN2pWeK3+rj54b1EG3XMxyPQA1a/DgwcrMzNSkSZOUmpqqDh06aNmyZWrevLkkKTU1VSkpKa7zQ0NDtWLFCv31r39Vt27d1LBhQ91yyy167rnnzLoFr2aLYxAPAOCdKEqhWjmTrB1pOXI4DPn4sKMM6o7/Jh/Wg+9vVk5hiWLCAzTzzgR1aVbf7LAAeKnRo0dr9OjRbp+bN29euWPx8fHllvzBHM7NYfZk5Kmg2K5AP6vJEQEAUDNYvodq1TIyRP5WH+UWlujAMfe7+QC1jcNhaNo3v2rEu6X9o7o1L+0fRUEKAHAuYsIDVC/YT3aHoV3puX/8AgAA6giKUqhWflYfXRATKonmnagbcgtLNHrhRr26YqcMQ7qjRzP9695LFB0WaHZoAIBaymKxuGZLJbGEDwDgRShKodqdanZOXynUbnsz8nTDm6u0fFua/KwWTb6xo56/oaP8fflfKQDg/MTThxMA4IXoKYVqZ3PtKMPIH2qv/+1I19j3NymnoETRYQGacWeCEpqzXA8AUDWYKQUA8EYUpVDtXDvKsHwPtZBhGJr+7W698vUOGYbUtVk9zbwzQdHhLNcDAFQdZ76UlJotwzBksbA5DACg7qMohWoXH1s6U+q3o/nKKyxRSAC/dqgd8gpL9LcPt+jLrWmSpNu6N9Oz17VXgC+7IgEAqtYFMaHysUjH8ot1JKeQwQ8AgFegEQqqXcPQAEWFBcgwpB2H6ZOA2uG3zDzdOH21vtxa2j/qhRs6avKNHSlIAQCqRaCfVS0jQyRJ21nCBwDwEhSlUCNcS/hodo5a4Nsd6Ro4baV2HM5RVFiAFt13iW7v0czssAAAddyplgfkSwAA70BRCjXCFuvcUYaRP3iu0v5RuzR83jplF5Tooqb19PlfeyuheQOzQwMAeIFTg3jkSwAA70BzH9SIeHbgg4fLLyrRox/9rC9+TpUk3XpxU028/kKW6wEAaky8axCPmVIAAO9AUQo14vTle+woA0+Tkpmv+xasV3Jajnx9LHr2ugt1R49m/J4CAGqUM1/alZ6rwhI7AyMAgDqP5XuoEa0iQ+VntSinsEQHj58wOxzA5fudRzTwjZVKTstRZGiA3r/vEt15SXMKUgCAGhcXEajwQF+VOAztTs8zOxwAAKodRSnUCH9fH7WOCpVEs3N4BsMw9NZ3uzXsnbXKOlGszk3raelfe+niFvSPAgCYw2KxKN7V7JyWBwCAuo+iFGqMc0o6faVgtvyiEo1dtFmTv0yWw5Bu6dZEi++7RHERQWaHBgDwcs7NYciXAADegJ5SqDG2uDB9uonmnTDX/qP5unf+qf5Rzwxsz3I9AIDHcPXhJF8CAHgBilKoMfGxJ2dKMR0dJln5a4YeeH+jjucXKzLUX9PvSFD3lizXAwB4jnjXzHKKUgCAuo+iFGpMfFzpdPR9GXk6UWRXkD87yqBmGIah2T/s1eQvk+QwpE5NIjTzzgQ1qsdyPQCAZ2kbEyqLRcrILdSRnEJFhQWYHRIAANWGnlKoMdFhgYoM9ZfDkHYeZvQPNeNEkV0PLtqs55eVFqRuTmiiD0b2pCAFAPBIwf6+atkwRBLNzgEAdR9FKdQo5xI+kizUhP1H83XTjNX6bMshWX0smnjdhXr55k4K9GOWHgDAczlnl7NjMQCgrqMohRoV79pRhiQL1Wv1rgxd98ZKbU/NVsMQfy28p4fuSmxBQ3MAgMdz9eFkBz4AQB1HTynUKFscSRaql2EYmrNyr144uVyvY+MIzRySoMYs1wMA1BKufIkd+AAAdRxFKdQo13T0tBwZhsGsFVSpgmK7nvj4Zy3ZfEiSdGPXxnrhho4s1wMA1CrOmeW70nNUbHfIz8riBgBA3URRCjWqTXSofH0syjpRrNSsAppNo8ocOJav
kQs2aNuhbFl9LJpwtU3De7FcDwBQ+zSpH6SwAF/lFJZoz5E8tTtZpAIAoK5h2AU1KsDXqtZRoZJodo6qs3p3hq57Y5W2HcpWgxB/LRjRXXf3bklBCgBQK1ksFtfscloeAADqMopSqHGnkiz6JOD8GIahuSv3asictTqaV6QLG4Xrswd6KbF1pNmhAQBwXlzNzhnEAwDUYSzfQ42Ljw3Xv3WIkT+cl4Jiu/7+yS/6ZNNBSdINXRpr8o30jwIA1A2uPpwM4gEA6jCKUqhxttOanQPn4uDxExq1YIN+OZglq49F4wfEawTL9QAAdQg7FgMAvAFFKdQ4Z5K150iuCortzGxBpazZk6kxCzcqM69I9YP99ObtXZXYhuV6AIC6pV1M6SBeek6hMnML1TA0wOSIAACoevSUQo2LDgtQ/WA/OQxpV3qu2eGgljAMQ/NW7dUds39SZl6R2seF67MHelOQAgDUSSEBvmreMFiStIPZ5QCAOoqiFGqcxWJxNe/czpR0VEBBsV1/+/BnPbt0u+wOQ9d1bqSP709U0wbBZocGAEC1iY8tnS1FvgQAqKsoSsEUziV8NO/EHzl0/IRueetHfbzxgHws0oSrbXrt1osU5M+yTwBA3ebKl5gpBQCoo+gpBVO4dpRhm2OcxU97MjXmXxuVkVukesF+euO2rup9Acv1AADewTmznHwJAFBXUZSCKWyxp3aUMQyDXdNQhmEYWrDmN01aul0lDkPxsWF6e2g3lusBALyKc8finYdzVWJ3yNfKIgcAQN1i+l+26dOnq2XLlgoMDFRCQoJ++OGHs56/cOFCde7cWcHBwYqLi9Pw4cOVmZlZ5pzjx49rzJgxiouLU2BgoGw2m5YtW1adt4FKuiAmVD4W6Vh+sdJzCs0OBx6koNiuxz76WU//e5tKHIau7RSnT0bTPwoA4H2a1g9WiL9VRSUO7c3IMzscAACqnKlFqcWLF2vcuHGaMGGCNm3apD59+mjAgAFKSUlxe/7KlSs1dOhQjRgxQtu2bdOHH36odevW6Z577nGdU1RUpCuvvFL79u3TRx99pB07dujtt99W48aNa+q2UAGBfla1igqVVDpbCpCk1KwTGjxrjT7cUNo/avyAeE27rYuC/ZnUCQDwPj4+FrU72ew8ib5SAIA6yNSi1JQpUzRixAjdc889stlsmjp1qpo2baoZM2a4PX/NmjVq0aKFxo4dq5YtW6p3794aOXKk1q9f7zpn7ty5Onr0qJYsWaJevXqpefPm6t27tzp37lxTt4UKcu4ok0Szc0hat++oBk5bpS37jysiyE/zhnfXyMtas7QTAODV4uNOtTwAAKCuMa0oVVRUpA0bNqhfv35ljvfr10+rV692+5rExEQdOHBAy5Ytk2EYOnz4sD766CNdc801rnM+++wz9ezZU2PGjFFMTIw6dOigF154QXa7vVrvB5V3akcZkixv5uwfddusNcrILVR8bJiWPtBbl7aNMjs0AABMd2rHYvIlAEDdY9qamIyMDNntdsXExJQ5HhMTo7S0NLevSUxM1MKFCzV48GAVFBSopKRE1113naZNm+Y6Z8+ePfrvf/+rO+64Q8uWLdOvv/6qMWPGqKSkRE8//bTb6xYWFqqw8FRfo+xs/ujXBGfzzmRmSnmtwhK7nl6yTYvX75ckXdMxTi//pRPL9QAAOMkW69yxmHwJAFD3mN7o/PdLc862E9v27ds1duxYPf3009qwYYOWL1+uvXv3atSoUa5zHA6HoqOjNWvWLCUkJOjWW2/VhAkTzrgkUJImT56siIgI16Np06ZVc3M4K+c2x7uP5KqwhJls3uZwdoFunbVGi9fvl8UiPX5VvN64nf5RAACcztlTKjWrQMfzi0yOBgCAqmVaUSoyMlJWq7XcrKj09PRys6ecJk+erF69eunRRx9Vp06d1L9/f02fPl1z585VamqqJCkuLk5t27aV1Wp1vc5msyktLU1FRe7/kI8fP15ZWVmux/79+6voLnE2
cRGBigjyU4nD0K70XLPDQQ3a8NtRXTttpTalHFd4oK/mDe+u+/9E/ygAAH4vLNBPTRsESaIPJwCg7jGtKOXv76+EhAStWLGizPEVK1YoMTHR7Wvy8/Pl41M2ZGfxyTAMSVKvXr20a9cuORwO1zk7d+5UXFyc/P393V43ICBA4eHhZR6ofhaLxdXsnCV83mPhT7/p1llrdCSnUG1jQvXZA711Gf2jAAA4I+fscvpwAgDqGlOX7z388MOaPXu25s6dq6SkJD300ENKSUlxLccbP368hg4d6jp/4MCB+uSTTzRjxgzt2bNHq1at0tixY9W9e3c1atRIknT//fcrMzNTDz74oHbu3KkvvvhCL7zwgsaMGWPKPeLsaHbuPQpL7Br/yS+a8OlWFdsNXd0xVp+O7qUWkSFmhwYAgEezMYgHAKijTG3eMnjwYGVmZmrSpElKTU1Vhw4dtGzZMjVv3lySlJqaqpSUFNf5w4YNU05Ojt544w098sgjqlevni6//HK9+OKLrnOaNm2qr7/+Wg899JA6deqkxo0b68EHH9Tjjz9e4/eHP+acKcV09LotPbtAo97boI0px2WxSH/r106jWa4HAECFxJ8cxEtiEA8AUMdYDOe6N7hkZ2crIiJCWVlZLOWrZlv2H9f1b65SZKi/1j95pdnhoBps+O2Y7n9vg9JzChUe6KvXbuuivu2izQ4LAM4ZeUIpvg81Z29Gnvq+8q0CfH20fdJVsvowqAMA8GwVzRNM330P3q1tTJgsFikjt0hHcgrNDgdVbNHaFN0660eln9Y/ioIUAACV06xBsIL8rCoscWhfZp7Z4QAAUGUoSsFUQf5WtWxY2lMoKZUp6XVFUYlDEz79RU988ouK7YauujBWn9A/CgCAc2L1saitq+UB+RIAoO6gKAXT0ey8bknPKdDtb6/Rwp9STvaPaqvpd3RVaICpLewAAKjV2sfR7BwAUPdQlILp4v+/vfsOj6pM/z/+mSSkkAoJIYmEhCIQegkoBFxRlyIbEVfBQgeVlSIi6rquiyII/tYV/KJEYV3QRYV1KaIiK7p0cIGY0ENvYuglhRSSOb8/QkZDKBEyc6a8X9eVS+fknJn7DOLccz/Pcz/sKOM20g6fVfK0Ndp06KyC/X30wYBEjbjrVnnR+wIAgJvSKIpBPACA+2HqAkxXuqPMDqaju7R/bTyiPy/apsJiq+pHBmlGvzaqWyPI7LAAAHAL7FgMAHBHFKVguoRL09H3ncxRYZFVvj5M4HMlhUVWvfblDv3z+0OSpC6Na+pvvVso2L+KyZEBAOA+Sgfxjp7L0/m8iwoN4HMWAOD6+PYP090SFqBgPx9dLDa0/1SO2eHgVziZXaDH/v69rSA15rcN9F7fNhSkAACoZKEBVXRLWIAkadcxZksBANwDRSmYzmKxqFE0O8q4mvQj55Q8bY02HjyrYD8f/b1/okbdTf8oAADsxdaHk75SAAA3QVEKTsG2Ax99ElzCvzYdUe/31+tYVr7q1QjUohFJuqdxTbPDAgDArZXmSwziAQDcBT2l4BRKd5TZyXR0p3ax2KoJX+7Qh+tLluvdk1BTU/rQPwoAAEf4eWY5+RIAwD1QlIJTYPme8zuVU6CnPv5BGw6ckSSNvudWjbqL5XoAADhK6SDermPZsloNPoMBAC6PohScQsOawbJYShpnn8opUESQn9kh4Re2/HhOT/4zVZnn8xXk56O3erdQlyZRZocFAIBHqRMRKD8fL+VdLNahMxdUJyLQ7JAAALgp9JSCUwj081Fc9aqS2FHG2cxP/VEPvrdemefzVTciUIuGJ1GQAgDABN5eFjUsbXbO7HIAgBugKAWnYesrRZLlFC4WW/XK4u169rPNKiyy6u5GkVo0Ikn1I4PMDg0AAI9VugMffTgBAO6AohScBs07ncfpnAL1/fv/NHvdQUnSqLvqa2b/RIXQ0BwAAFMxiAcAcCf0lILTKN3mOOMYSZaZth09ryf/maqj5/IU6Outt/q0
VFeW6wEA4BTIlwAA7oSiFJxGwqWRvz3Hc1RUbJWPNxP5HG1h2o/64/ytKiiyqk5EoGb0a6NbawabHRYAALikdPnekTN5ys6/qGBmMQMAXBjf+uE0alULUKCvtwqLrdp/KtfscDxKUbFV47/YoWfmbVZBkVWdG9bQouFJFKQAAHAy1QJ9FRXiL4nNYQAAro+iFJyGl5dFjaLpk+BoZ3IL1f8fG/SPtQckSSPvqq8PBrRVaAAjrwAAOKOEaJqdAwDcA0UpOJXSKekZJFkOse3oeSVPW6N1+06rqq+33uvbWs92aSgvL4vZoQEAgKsoHcTLYBAPAODi6CkFp8JMKcf5PP2oXpi/RfkXrYoPr6oZ/RPVgOV6AAA4vdJBPPIlAICroygFp9L40nT0jExmStlLUbFVk7/O0N/XlCzXu7NhDb3dp5VCq7JcDwAAV9D40iDermPZsloNZjgDAFwWRSk4ldKZOsey8nU2t1DVAn1Njsi9nMkt1MhPf9DavaclSU/dWU/Pdmkob5JZAABcRp2IQPl6eym3sFg/ns1T7fCqZocEAMANoacUnEqwfxXFVg+QJO08xpT0yrTjpyzd984ard1b0j9q+mOt9Xy3RhSkAABwMT7eXrq1ZpAk8iUAgGujKAWnkxBV2ryTJXyVZfHmn/RAytqS0dTqVbXwqSTd2yza7LAAAMANSqAPJwDADVCUgtOx7SjDyN9NKyq26vUlOzXq0zTlX7Sq060RWjwiSQ2jaGgOAIArs+1YzCAeAMCF0VMKTifBtqMMSdbNOHehUCM/TdPqPackScN+U0/PdaV/FAAA7iCBQTwAgBugKAWnU5pk7T6eraJiq3y8mdD3a+3MzNIT/9ykI2fyFFDFW399qLl+1zzG7LAAAEAlKZ0pdejMBeUWFCnQj7QeAOB6+LYPp1O7elUFVPFWQZFVB09fMDscl/Pllp/0wPR1OnImT7HVA7TgqQ4UpAAAcDPhQX6KDPaTYUi7jjO7HADgmihKwel4eVlsPY+Ykl5xxVZDk7/O0IhP0pR3sVidbo3QFyM62maeAQAA92Lrw0nLAwCAi6IoBaeUEF3aV4qiVEWcu1CoQbM36r2V+yRJT95RV7MGtlVYVV+TIwMAAPbycx9O8iUAgGti8TmcUgIjfxWWcSxLT3yUqsNnLsi/ipf+34MtdF8LlusBAODuaHYOAHB1FKXglBpFlSZZFKWuZcnWTI39bLMuFBarVrUAzeiXqMYxLNcDAMATNLo0szwjM1uGYchiYYddAIBroSgFp1TaU+rouTydv3BRoVWrmByRcym2GvrbN7s0fUXJcr2k+uF655HWqhbIcj0AADxF3YggVfG2KLugSD+ezVNs9apmhwQAwK9CTyk4pdCAKrolLEASU9Ivd/7CRQ2evdFWkHq8Ux19OKgdBSkAADyMr4+X6keWbg7D7HIAgOuhKAWnVdrsnCTrZ7uPZ+u+d9do5e6T8q/ipbcfbqmXejSWjzd/lQEA8ESlzc4zaHYOAHBBfJOF0yrtK8WOMiW+3pqp+99dq0OnL+iWsAD9e1gH9Wx5i9lhAQAAEzViEA8A4MLoKQWnVbqjzE4PT7KKrYamLNutd5bvlSR1qBeudx5treos1wMAwOPZ8iUG8QAALoiiFJxW6cjf7mPZKrYa8vbyvB1lzudd1Oi5aVq+66QkaUjHOnqxeyOW6wEAAEk/zyw/cDpXeYXFCvD1NjkiAAAqjm+2cFrx4YHyr+KlvIvFOnQ61+xwHG7P8Wzd/+5aLd91Un4+XprSp4Ve/h39owAAwM9qBPspIshXhlHSexIAAFfCt1s4LW8vixrW9Mw+CUu3HdP9767VgVO5uiUsQPP/0EG9WtUyOywAAOCEWMIHAHBVFKXg1EqnpHvKjjJWq6G3vtmlYXNSlVtYrNvrVtfiEUlqekuo2aEBAAAn1SjKMwfxAACuj55ScGqlfaU8odl5Vv5FPTM3Xd9lnJAkDUqK15/uTVAVlusBAIBrYMdi
AICroigFp+YpSdbeE9l64qNU7T+VK18fL03q1Uy/b8NyPQAAcH22QbzMLBmGIYvF8zaHAQC4JopScGoJl5KsH8/mKSv/okL8q5gcUeVbtuO4npmXrpyCIkWH+uv9fm3UvFaY2WEBAAAXUT8ySD5eFmXlFynzfL5iwgLMDgkAgAphXRCcWlhVX0WH+kuSdrvZEj6r1dCUZbv1+EeblFNQpHZ1quuLkR0pSAEAgF/Fz8db9WoESZIyjrn37HIAgHuhKAWnV9q8052W8GXnX9QT/0zV29/tkSQN7BCvj4fepoggP5MjAwAArujnJXzuNYgHAHBvFKXg9GzbHLvJTKl9J3N0/7tr9e3O4/L18dJfH2yuV+5rQkNzAABww2z5khsN4gEA3B89peD0Gl1KsjLcIMn69lL/qOyCIkWFlPSPahEbZnZYAADAxZXOLM9wk0E8AIBnoCgFp5fwiyTLajXk5eV6O8pYrYam/Xevpny7W5LULr663n2stWoEs1wPAADcvNKZUvtP5ij/YrH8q3ibHBEAANfHeiE4vToRgfL18dKFwmIdOXvB7HB+tez8ixo2J9VWkOrfPk5zht5GQQoAAFSayGA/VQ/0ldWQ9hzPMTscAAAqhKIUnJ6Pt5ca1CzZUcbVmnfuP5mjXtPX6Zsdx+Xr7aX/9/vmGt+zqXx9+KsHAAAqj8Vi+XlzGHbgAwC4CL4ZwyU0inK95p3/zTiunu+s1d4TOaoZ4qd5T96u3m1jzQ4LAAC4qdJ8KcPFBvEAAJ6LohRcQmmfhAwXGPmzWg1N+26Phny4SdkFRUqMq6YvRnZUq9rVzA4NAODGpk+frjp16sjf319t2rTR6tWrr3ruihUrZLFYyv1kZGQ4MGJUtoToSzOlXGgQDwDg2Wh0DpeQ4CI7yuQUFGnsvzZr6fZjkqTHbqutcclNWK4HALCrefPmafTo0Zo+fbqSkpL0/vvvq3v37tqxY4dq16591et27dqlkJAQ2+MaNWo4IlzYyS8H8QzDkMXiepvDAAA8C9+U4RIaXUqyDp2+oJyCIpOjubIDp3LV6921Wrr9mHy9vTT5gWaa2KsZBSkAgN299dZbGjJkiIYOHaqEhARNnTpVsbGxSklJueZ1kZGRioqKsv14e7NjmyurHxkkby+Lzl64qBPZBWaHAwDAdfFtGS6heqCvaoaU7Fa3ywlnSy3fdUL3vbNGe07kKDLYT3OfvF0Pt7v6yDQAAJWlsLBQqamp6tKlS5njXbp00bp16655batWrRQdHa27775by5cvt2eYcAD/Kt6qGxEoSdrBEj4AgAugKAWXYWve6UR9pQzD0LvL92rw7I3Kzi9Sm7hq+nJkR7WmfxQAwEFOnTql4uJi1axZs8zxmjVr6tixY1e8Jjo6WjNmzND8+fO1YMECNWzYUHfffbdWrVp11dcpKChQVlZWmR84n9LZ5TQ7BwC4AnpKwWU0ig7Wyt0nnSbJyi0o0tjPNuvrbSUJ/6O31dYr9I8CAJjk8v5B1+op1LBhQzVs2ND2uH379jpy5IjefPNN3XHHHVe8ZtKkSXr11VcrL2DYRaOoYH2x2bkG8QAAuBq+PcNlJFyaKeUMO8ocPJWrB6av09fbjqmKt0Wv92qm1+kfBQAwQUREhLy9vcvNijpx4kS52VPXcvvtt2vPnj1X/f2LL76o8+fP236OHDlywzHDftiBDwDgSvgGDZfx844y2TIMw7Q4VlzqH7XreLZqBPtp7hO369Hb6B8FADCHr6+v2rRpo2XLlpU5vmzZMnXo0KHCz5OWlqbo6Oir/t7Pz08hISFlfuB8SvOlfSdzVVBUbHI0AABcG8v34DLq1ghUFW+LcgqK9OPZPMVWr+rQ1zcMQykr9+mv/9klw5Ba1Q7Te33bqGaIv0PjAADgcmPGjFG/fv2UmJio9u3ba8aMGTp8+LCGDRsmqWSW09GjR/XRRx9JkqZOnar4+Hg1adJEhYWFmjNnjubPn6/58+ebeRuoBFEh/goN
qKLzeRe190SOmsSEmh0SAABXRVEKLqOKt5fqRwZrZ2aWdmZmObQolVtQpOf/vUVfbc2UJD3cNlav9mwiPx+2zgYAmK9Pnz46ffq0xo8fr8zMTDVt2lRLlixRXFycJCkzM1OHDx+2nV9YWKixY8fq6NGjCggIUJMmTfTVV1/p3nvvNesWUEksFosaRQXrfwfOaGdmNkUpAIBTM3353vTp01WnTh35+/urTZs2Wr169TXP//jjj9WiRQtVrVpV0dHRGjRokE6fPn3Fc+fOnSuLxaL777/fDpHDDKV9EjKOOa7Z+eHTF/T7lHX6amumqnhbNOH+ppr0QDMKUgAAp/LUU0/p4MGDKigoUGpqapmG5bNnz9aKFStsj59//nnt3btXeXl5OnPmjFavXk1Byo3YWh7QVwoA4ORMLUrNmzdPo0eP1ksvvaS0tDR16tRJ3bt3LzOS90tr1qxR//79NWTIEG3fvl2fffaZNm7cqKFDh5Y799ChQxo7dqw6depk79uAA5U2O3fUjjKrdp9U8jtrlHEsWxFBfvr08dvV9/a4q+5mBAAAYDYzBvEAALgRphal3nrrLQ0ZMkRDhw5VQkKCpk6dqtjYWKWkpFzx/O+//17x8fEaNWqU6tSpo44dO+rJJ5/Upk2bypxXXFysxx57TK+++qrq1q3riFuBgzSy7Shj3yTLMAy9t3KfBs7aoPN5F9UiNkxfjuyoxPjqdn1dAACAm9XIwYN4AADcKNOKUoWFhUpNTVWXLl3KHO/SpYvWrVt3xWs6dOigH3/8UUuWLJFhGDp+/Lj+/e9/q0ePHmXOGz9+vGrUqKEhQ4bYLX6Yo3Q6+sHTubpQWGSX17hQWKSRn6Zp8tcZshpS78RamvfE7YoKpaE5AABwfg1qBsvLIp3KKdSJ7HyzwwEA4KpMa3R+6tQpFRcXq2bNmmWO16xZU8eOHbviNR06dNDHH3+sPn36KD8/X0VFRbrvvvs0bdo02zlr167VBx98oPT09ArHUlBQoIKCAtvjrCxGlZxVRJCfIoL8dCqnQLuP56hlbFilPv+RMxf0+EeblHEsWz5eFo1LbsxyPQAA4FICfL0VHxGo/SdzlZGZrchgBtYAAM7J9Ebnl3/ZNwzjqgWAHTt2aNSoUfrLX/6i1NRULV26VAcOHLBtd5ydna2+fftq5syZioiIqHAMkyZNUmhoqO0nNjb2xm8IdpdgW8JXucXDNXtO/aJ/lK8+efx29WsfT0EKAAC4HEf34QQA4EaYNlMqIiJC3t7e5WZFnThxotzsqVKTJk1SUlKSnnvuOUlS8+bNFRgYqE6dOmnChAk6fvy4Dh48qOTkZNs1VqtVkuTj46Ndu3apXr165Z73xRdf1JgxY2yPs7KyKEw5sYToEK3ec6rSdpQxDEMzV++3LddrUStUKX3bKCYsoFKeHwAAwNESooP11dZMu/fhBADgZphWlPL19VWbNm20bNky9erVy3Z82bJl6tmz5xWvuXDhgnx8yobs7e0tqaSw0KhRI23durXM7//85z8rOztbb7/99lULTX5+fvLz87uZ24EDNYq6NFOqEnaUySss1gvzt2jx5p8kSQ+2qaUJ9zeVfxXvm35uAAAAs5Q2O6/smeUAAFQm04pSkjRmzBj169dPiYmJat++vWbMmKHDhw/bluO9+OKLOnr0qD766CNJUnJysh5//HGlpKSoa9euyszM1OjRo9WuXTvFxMRIkpo2bVrmNcLCwq54HK7LtqNMZtY1l3tez5EzF/TkP1O1IzNL3l4W/eV3jdW/Pf2jAACA6yvdsXjfyRwVFlnl62N61w4AAMoxtSjVp08fnT59WuPHj1dmZqaaNm2qJUuWKC4uTpKUmZmpw4cP284fOHCgsrOz9c477+jZZ59VWFiY7rrrLr3xxhtm3QJMUD8ySD5eFmXlF+mn8/m65QaW2a3de0ojPvlBZy9cVHigr959rLVurxtuh2gBAAAc75awAAX7+yg7v0j7TubYdjAGAMCZ
WAzDMMwOwtlkZWUpNDRU58+fV0gIH+DOqNvUVco4lq0PBiTq7oQr9yC7EsMw9MGaA3p9yU5ZDanZLaF6vx/9owAAFUeeUIL3wfn1fm+9Nhw8oyl9WqhXq1pmhwMA8CAVzROYxwuXVNpXKuNX9JXKKyzWM/PSNeGrkoLUA61v0WfD2lOQAgAAbql0CV8Gzc4BAE7K1OV7wI1qFB0ipf+kHRVs3vnj2ZL+Udt/Kukf9eceCRrYIZ7+UQAAwG2V9uGsaL4EAICjUZSCSyrti5BRgSRr3b5TGvFJms7kFqp6oK/efbS12tejfxQAAHBvCdG/fmY5AACORFEKLinh0vK9A6dylX+xWP5VvMudYxiGZq09qIlLdqrYaqhJTIje79dGtapVdXS4AAAADtegZrAsFulkdoFO5RQoIsjP7JAAACiDnlJwSTWC/VQ90FdWQ9p9vPzoX/7FYj37r80a/+UOFVsN9Wp1i+b/oQMFKQAA4DEC/XwUV70k96GvFADAGVGUgkuyWCw/T0m/LMk6ei5PD723XgvSjsrby6KXf9dYb/VuccXZVAAAAO7M1vLgGH2lAADOh6IUXFaDmiVFqa+2/qT1+06r2Gro+/2ndd+0Ndp69LyqVa2ifw5upyEd69DQHAAAeKTSfOmb7cdt+RIAwLMVWw2t33dan6cfNf2zgZ5ScElLt2VqwQ9HJUkrd5/Syt2nFOLvo5yCIlkNqXF0Sf+o2Oos1wMAAJ5p6bZMfbT+oCRpw8EzemTm94oO9de45Mbq1jTa3OAAAKZYui1Tr36xQ5nn823HzPxsYKYUXM7SbZn6w5wfdD7vYpnjWfklBam28dU0/w8dKEgBAACPVZovnb1QNl86dj5ff5jzg5ZuyzQpMgCAWUo/G35ZkJLM/WxgphRcSrHV0Ktf7NC1Jhf+eDZPvj7UWwEAgGe6Vr5UeuyPC7aqsMgqLy9aHACAJ7BaDf1l8farfjZYJL36xQ79tnGUvB342UBRCi5lw4Ez5aq6l8s8n68NB86ofb1wB0UFAADgPCqSL527cFGj5qY7JiAAgNMzZM53aYpScCknsq+dYP3a8wAAANxNRfOg+pGBigjys3M0AABncCqnQHtP5F73PEd/l6YoBZcSGexfqecBAAC4m4rmQa/1bMbMcgDwEOv3ndYjM7+/7nmO/i5N4x24lHZ1qis61F9XW+FqUcnOAe3qVHdkWAAAAE6DfAkAcDln/WygKAWX4u1l0bjkxpJU7i9T6eNxyY0d2pgNAADAmZAvAQAu56yfDRSl4HK6NY1WSt/WigotO60wKtRfKX1bq1vTaJMiAwAAcA7kSwCAyznjZ4PFMIwr7Qjo0bKyshQaGqrz588rJCTE7HBwFcVWQxsOnNGJ7HxFBpdMM2TEDwBgb+QJJXgfXAP5EgDgco74bKhonkCjc7gsby8LzTkBAACugXwJAHA5Z/psYPkeAAAAAAAAHI6iFAAAAAAAAByOohQAAAAAAAAcjqIUAAAAAAAAHI6iFAAAAAAAAByOohQAAAAAAAAcjqIUAAAAAAAAHI6iFAAAAAAAAByOohQAAAAAAAAcjqIUAAAAAAAAHI6iFAAAAAAAAByOohQAAAAAAAAcjqIUAAAAAAAAHI6iFAAAAAAAAByOohQAAAAAAAAczsfsAJyRYRiSpKysLJMjAQAAzqY0PyjNFzwV+RIAALiaiuZLFKWuIDs7W5IUGxtrciQAAMBZZWdnKzQ01OwwTEO+BAAArud6+ZLF8PRhviuwWq366aefFBwcLIvFYnY4dpeVlaXY2FgdOXJEISEhZodjd9yve+N+3Rv3695c5X4Nw1B2drZiYmLk5eW5nRDIl9wb9+veuF/3xv26N1e534rmS8yUugIvLy/VqlXL7DAcLiQkxKn/o65s3K97437dG/fr3lzhfj15hlQp8iXPwP26N+7XvXG/7s0V7rci+ZLnDu8BAAAAAADANBSlAAAAAAAA4HAUpSA/Pz+NGzdOfn5+Zofi
ENyve+N+3Rv369487X7hWjztv0/u171xv+6N+3Vv7na/NDoHAAAAAACAwzFTCgAAAAAAAA5HUQoAAAAAAAAOR1EKAAAAAAAADkdRyoOtWrVKycnJiomJkcVi0aJFi8wOyW4mTZqktm3bKjg4WJGRkbr//vu1a9cus8Oym5SUFDVv3lwhISEKCQlR+/bt9fXXX5sdlsNMmjRJFotFo0ePNjsUu3jllVdksVjK/ERFRZkdll0dPXpUffv2VXh4uKpWraqWLVsqNTXV7LDsJj4+vtyfscVi0fDhw80OrdIVFRXpz3/+s+rUqaOAgADVrVtX48ePl9VqNTs0QBL5kjvnS5Jn50zuni9J5EzunjN5Ur4kuW/O5GN2ADBPbm6uWrRooUGDBun3v/+92eHY1cqVKzV8+HC1bdtWRUVFeumll9SlSxft2LFDgYGBZodX6WrVqqXJkyerfv36kqQPP/xQPXv2VFpampo0aWJydPa1ceNGzZgxQ82bNzc7FLtq0qSJvv32W9tjb29vE6Oxr7NnzyopKUmdO3fW119/rcjISO3bt09hYWFmh2Y3GzduVHFxse3xtm3b9Nvf/lYPPfSQiVHZxxtvvKH33ntPH374oZo0aaJNmzZp0KBBCg0N1dNPP212eAD5khvnS5Ln5kyeki9J5EzunDN5Ur4kuW/ORFHKg3Xv3l3du3c3OwyHWLp0aZnHs2bNUmRkpFJTU3XHHXeYFJX9JCcnl3k8ceJEpaSk6Pvvv3frBCsnJ0ePPfaYZs6cqQkTJpgdjl35+Pi4/UhfqTfeeEOxsbGaNWuW7Vh8fLx5ATlAjRo1yjyePHmy6tWrp9/85jcmRWQ/69evV8+ePdWjRw9JJX+2n376qTZt2mRyZEAJ8iX3zZckz8yZPClfksiZ3Dln8qR8SXLfnInle/BI58+flyRVr17d5Ejsr7i4WHPnzlVubq7at29vdjh2NXz4cPXo0UP33HOP2aHY3Z49exQTE6M6dero4Ycf1v79+80OyW4WL16sxMREPfTQQ4qMjFSrVq00c+ZMs8NymMLCQs2ZM0eDBw+WxWIxO5xK17FjR3333XfavXu3JGnz5s1as2aN7r33XpMjA+BJ+ZLkOTmTJ+VLEjmTp+RM7p4vSe6bMzFTCh7HMAyNGTNGHTt2VNOmTc0Ox262bt2q9u3bKz8/X0FBQVq4cKEaN25sdlh2M3fuXP3www/auHGj2aHY3W233aaPPvpIDRo00PHjxzVhwgR16NBB27dvV3h4uNnhVbr9+/crJSVFY8aM0Z/+9Cdt2LBBo0aNkp+fn/r37292eHa3aNEinTt3TgMHDjQ7FLt44YUXdP78eTVq1Eje3t4qLi7WxIkT9cgjj5gdGuDRPCVfkjwrZ/KkfEkiZ/KknMnd8yXJfXMmilLwOCNGjNCWLVu0Zs0as0Oxq4YNGyo9PV3nzp3T/PnzNWDAAK1cudItk6wjR47o6aef1jfffCN/f3+zw7G7Xy4jadasmdq3b6969erpww8/1JgxY0yMzD6sVqsSExP1+uuvS5JatWql7du3KyUlxe0TLEn64IMP1L17d8XExJgdil3MmzdPc+bM0SeffKImTZooPT1do0ePVkxMjAYMGGB2eIDH8pR8SfKcnMnT8iWJnMmTciZ3z5ck982ZKErBo4wcOVKLFy/WqlWrVKtWLbPDsStfX19b087ExERt3LhRb7/9tt5//32TI6t8qampOnHihNq0aWM7VlxcrFWrVumdd95RQUGBWze1DAwMVLNmzbRnzx6zQ7GL6Ojocl8MEhISNH/+fJMicpxDhw7p22+/1YIFC8wOxW6ee+45/fGPf9TDDz8sqeRLw6FDhzRp0iSXTrAAV+ZJ+ZLkOTmTp+dLEjmTu/KEfEly35yJohQ8gmEYGjlypBYuXKgVK1aoTp06ZofkcIZhqKCgwOww7OLuu+/W1q1byxwbNGiQGjVqpBdeeMHtE6yCggLt3LlTnTp1MjsUu0hKSiq3
Jfnu3bsVFxdnUkSOU9pkuLShpTu6cOGCvLzKtrj09vZ2+e2NAVdEvlTCXXMmT8+XJHImd+UJ+ZLkvjkTRSkPlpOTo71799oeHzhwQOnp6apevbpq165tYmSVb/jw4frkk0/0+eefKzg4WMeOHZMkhYaGKiAgwOToKt+f/vQnde/eXbGxscrOztbcuXO1YsWKcrvquIvg4OBy/S4CAwMVHh7uln0wxo4dq+TkZNWuXVsnTpzQhAkTlJWV5dIjJNfyzDPPqEOHDnr99dfVu3dvbdiwQTNmzNCMGTPMDs2urFarZs2apQEDBsjHx30/rpOTkzVx4kTVrl1bTZo0UVpamt566y0NHjzY7NAASeRLkvvmS5Jn5Uyeli9J5EyekDN5Sr4kuXHOZMBjLV++3JBU7mfAgAFmh1bprnSfkoxZs2aZHZpdDB482IiLizN8fX2NGjVqGHfffbfxzTffmB2WQ/3mN78xnn76abPDsIs+ffoY0dHRRpUqVYyYmBjjgQceMLZv3252WHb1xRdfGE2bNjX8/PyMRo0aGTNmzDA7JLv7z3/+Y0gydu3aZXYodpWVlWU8/fTTRu3atQ1/f3+jbt26xksvvWQUFBSYHRpgGAb5kjvnS4ZBzuTO+ZJhkDN5Qs7kKfmSYbhvzmQxDMNwXAkMAAAAAAAAkLyufwoAAAAAAABQuShKAQAAAAAAwOEoSgEAAAAAAMDhKEoBAAAAAADA4ShKAQAAAAAAwOEoSgEAAAAAAMDhKEoBAAAAAADA4ShKAQAAAAAAwOEoSgGAnRQWFqp+/fpau3btDV1/8OBBWSwWpaenS5JWrFghi8Wic+fOSZJmz56tsLAw2/mvvPKKWrZseXNB/wpbt25VrVq1lJub67DXBAAA7oV8CfBsFKUASJIGDhwoi8Uii8WiKlWqqG7duho7dqxLf4DGx8dr6tSppr3+jBkzFBcXp6SkJNux0vf4+++/L3NuQUGBwsPDZbFYtGLFCklSbGysMjMz1bRp0wq93tixY/Xdd99VWvzX06xZM7Vr105Tpkxx2GsCAGAm8qXKR74EeDaKUgBsunXrpszMTO3fv18TJkzQ9OnTNXbs2Bt6LsMwVFRUVMkRmqOwsPCGrps2bZqGDh1a7nhsbKxmzZpV5tjChQsVFBRU5pi3t7eioqLk4+NTodcLCgpSeHj4DcV6owYNGqSUlBQVFxc79HUBADAL+dKVkS9dHfkScHUUpQDY+Pn5KSoqSrGxsXr00Uf12GOPadGiRZKkOXPmKDExUcHBwYqKitKjjz6qEydO2K4tnSr9n//8R4mJifLz89Pq1au1b98+9ezZUzVr1lRQUJDatm2rb7/9tszrxsfHa8KECerfv7+CgoIUFxenzz//XCdPnlTPnj0VFBSkZs2aadOmTWWuW7dune644w4FBAQoNjZWo0aNso1U3nnnnTp06JCeeeYZ22hbRa77ZTwDBw5UaGioHn/8cRUWFmrEiBGKjo6Wv7+/4uPjNWnSpKu+lz/88IP27t2rHj16lPvdgAEDNHfuXOXl5dmO/eMf/9CAAQPKnHf5dPTruXw6utVq1fjx41WrVi35+fmpZcuWWrp0abnnX7BggTp37qyqVauqRYsWWr9+ve2cQ4cOKTk5WdWqVVNgYKCaNGmiJUuW2H7ftWtXnT59WitXrqxQjAAAuDrypbLxkC+RLwE3g6IUgKsKCAjQxYsXJZWMfr322mvavHmzFi1apAMHDmjgwIHlrnn++ec1adIk7dy5U82bN1dOTo7uvfdeffvtt0pLS1PXrl2VnJysw4cPl7luypQpSkpKUlpamnr06KF+/fqpf//+6tu3r3744QfVr19f/fv3l2EYkkrW53ft2lUPPPCAtmzZonnz5mnNmjUaMWKEJGnBggWqVauWxo8fr8zMTGVmZlboulJ//etf1bRpU6Wmpurll1/W//3f/2nx4sX617/+pV27dmnOnDmKj4+/6nu3atUqNWjQQCEhIeV+16ZN
G9WpU0fz58+XJB05ckSrVq1Sv379KvYHU0Fvv/22/va3v+nNN9/Uli1b1LVrV913333as2dPmfNeeukljR07Vunp6WrQoIEeeeQR26jt8OHDVVBQoFWrVmnr1q164403yoxQ+vr6qkWLFlq9enWlxg4AgKsgXyJfIl8CboIBAIZhDBgwwOjZs6ft8f/+9z8jPDzc6N279xXP37BhgyHJyM7ONgzDMJYvX25IMhYtWnTd12rcuLExbdo02+O4uDijb9++tseZmZmGJOPll1+2HVu/fr0hycjMzDQMwzD69etnPPHEE2Wed/Xq1YaXl5eRl5dne94pU6aUOaei191///1lzhk5cqRx1113GVar9br3ZxiG8fTTTxt33XVXueOSjIULFxpTp041OnfubBiGYbz66qtGr169jLNnzxqSjOXLlxuGYRgHDhwwJBlpaWmGYfz8Hp89e9YwDMOYNWuWERoaanvucePGGS1atLA9jomJMSZOnFjm9du2bWs89dRTZZ7/73//u+3327dvNyQZO3fuNAzDMJo1a2a88sor17zXXr16GQMHDrzuewIAgKsjXyJfMgzyJaAyMVMKgM2XX36poKAg+fv7q3379rrjjjs0bdo0SVJaWpp69uypuLg4BQcH684775SkciN4iYmJZR7n5ubq+eefV+PGjRUWFqagoCBlZGSUu6558+a2f69Zs6akksaQlx8rnQKfmpqq2bNnKygoyPbTtWtXWa1WHThw4Kr3WNHrLr+PgQMHKj09XQ0bNtSoUaP0zTffXP2NlJSXlyd/f/+r/r5v375av3699u/fr9mzZ2vw4MHXfL5fKysrSz/99FOZpqGSlJSUpJ07d5Y59sv3Pjo6WtLP7/OoUaM0YcIEJSUlady4cdqyZUu51woICNCFCxcqNX4AAJwV+RL5EvkSUHkoSgGw6dy5s9LT07Vr1y7l5+drwYIFioyMVG5urrp06aKgoCDNmTNHGzdu1MKFCyWVb2oZGBhY5vFzzz2n+fPna+LEiVq9erXS09PVrFmzctdVqVLF9u+l/QyudMxqtdr++eSTTyo9Pd32s3nzZu3Zs0f16tW76j1W9LrL76N169Y6cOCAXnvtNeXl5al379568MEHr/o6EREROnv27FV/Hx4ert/97ncaMmSI8vPz1b1796ueezN+2RtCKmmoevmxa73PQ4cO1f79+9WvXz9t3bpViYmJtsS71JkzZ1SjRg17hA8AgNMhXyJfIl8CKk/FtigA4BECAwNVv379csczMjJ06tQpTZ48WbGxsZJUronm1axevVoDBw5Ur169JEk5OTk6ePDgTcfaunVrbd++/YrxlvL19S23y0lFrruakJAQ9enTR3369NGDDz6obt266cyZM6pevXq5c1u1aqWUlJQrJjWlBg8erHvvvVcvvPCCvL29f3U814s1JiZGa9as0R133GE7vm7dOrVr1+5XPVdsbKyGDRumYcOG6cUXX9TMmTM1cuRI2++3bdt2zYQTAAB3Qr50beRL5EvAr0FRCsB11a5dW76+vpo2bZqGDRumbdu26bXXXqvQtfXr19eCBQuUnJwsi8Wil19+2TaqdDNeeOEF3X777Ro+fLgef/xxBQYGaufOnVq2bJltZCo+Pl6rVq3Sww8/LD8/P0VERFTouiuZMmWKoqOj1bJlS3l5eemzzz5TVFSUwsLCrnh+586dlZubq+3bt6tp06ZXPKdbt246efLkFZt7VobnnntO48aNU7169dSyZUvNmjVL6enp+vjjjyv8HKNHj1b37t3VoEEDnT17Vv/973+VkJBg+/3Bgwd19OhR3XPPPfa4BQAAXAb5EvkS+RLw67F8D8B11ahRQ7Nnz9Znn32mxo0ba/LkyXrzzTcrdO2UKVNUrVo1dejQQcnJyeratatat2590zE1b95cK1eu1J49e9SpUye1atVKL7/8sm2NvySNHz9eBw8eVL169WzTpSty3ZUEBQXpjTfeUGJiotq2bauD
Bw9qyZIl8vK68v9Gw8PD9cADD1wzobFYLIqIiJCvr+8NvAPXN2rUKD377LN69tln1axZMy1dulSLFy/WrbfeWuHnKC4u1vDhw5WQkKBu3bqpYcOGmj59uu33n376qbp06aK4uDh73AIAAC6DfIl8iXwJ+PUshnFpv1AAQKXaunWr7rnnHu3du1fBwcFmh1PpCgoKdOutt+rTTz8t1yAUAACgIsiXAM9GUQoA7OjDDz9U69aty+yM4y52796t5cuX68knnzQ7FAAA4MLIlwDPRVEKAAAAAAAADkdPKQAAAAAAADgcRSkAAAAAAAA4HEUpAAAAAAAAOBxFKQAAAAAAADgcRSkAAAAAAAA4HEUpAAAAAAAAOBxFKQAAAAAAADgcRSkAAAAAAAA4HEUpAAAAAAAAOBxFKQAAAAAAADjc/weAqf8q5Ji2ZAAAAABJRU5ErkJggg==",
      "text/plain": [
       "<Figure size 1200x600 with 2 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Results of the first experiment run\n",
    "run_scaling_experiments()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0ee20f7d",
   "metadata": {
    "scrolled": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Using device: cuda\n",
      "\n",
      "=== Running experiment for Tiny model ===\n",
      "Total trainable parameters: 0.40M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7912\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.7085\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.6774\n",
      "Pretrain Epoch: 015, TSNE Loss: 0.5793\n",
      "Pretrain Epoch: 020, TSNE Loss: 0.4664\n",
      "Pretrain Epoch: 025, TSNE Loss: 0.3291\n",
      "Pretrain Epoch: 030, TSNE Loss: 0.2177\n",
      "Pretrain Epoch: 035, TSNE Loss: 0.0699\n",
      "Pretrain Epoch: 040, TSNE Loss: -0.0107\n",
      "Pretrain Epoch: 045, TSNE Loss: -0.0276\n",
      "Pretrain Epoch: 050, TSNE Loss: -0.0320\n",
      "Pretrain Epoch: 055, TSNE Loss: -0.0131\n",
      "Pretrain Epoch: 060, TSNE Loss: -0.0011\n",
      "Pretrain Epoch: 065, TSNE Loss: 0.0113\n",
      "Pretrain Epoch: 070, TSNE Loss: 0.0195\n",
      "Pretrain Epoch: 075, TSNE Loss: 0.0316\n",
      "Pretrain early stopping at epoch 76\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 1.6339 | Val AUC: 0.8629 | Val F1: 0.8260\n",
      "Epoch: 005 | Loss: 0.0017 | Val AUC: 0.8936 | Val F1: 0.8706\n",
      "Epoch: 010 | Loss: 0.0235 | Val AUC: 0.9074 | Val F1: 0.9198\n",
      "Epoch: 015 | Loss: 0.0657 | Val AUC: 0.8934 | Val F1: 0.8343\n",
      "Epoch: 020 | Loss: 0.0378 | Val AUC: 0.8979 | Val F1: 0.9021\n",
      "Epoch: 025 | Loss: 0.4468 | Val AUC: 0.9522 | Val F1: 0.9208\n",
      "Epoch: 030 | Loss: 0.2843 | Val AUC: 0.9342 | Val F1: 0.8755\n",
      "Epoch: 035 | Loss: 0.1016 | Val AUC: 0.9352 | Val F1: 0.9085\n",
      "Epoch: 040 | Loss: 0.0076 | Val AUC: 0.9196 | Val F1: 0.9172\n",
      "Epoch: 045 | Loss: 0.0850 | Val AUC: 0.9357 | Val F1: 0.9202\n",
      "Epoch: 050 | Loss: 0.0587 | Val AUC: 0.9304 | Val F1: 0.9145\n",
      "Epoch: 055 | Loss: 0.0794 | Val AUC: 0.9037 | Val F1: 0.9192\n",
      "Epoch: 060 | Loss: 0.1907 | Val AUC: 0.9419 | Val F1: 0.9108\n",
      "Epoch: 065 | Loss: 0.0046 | Val AUC: 0.9270 | Val F1: 0.8935\n",
      "Epoch: 070 | Loss: 0.1654 | Val AUC: 0.8804 | Val F1: 0.8435\n",
      "Epoch: 075 | Loss: 0.3901 | Val AUC: 0.9243 | Val F1: 0.9081\n",
      "Epoch: 080 | Loss: 0.1418 | Val AUC: 0.9415 | Val F1: 0.9134\n",
      "Epoch 00017: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 085 | Loss: 0.0747 | Val AUC: 0.9455 | Val F1: 0.9238\n",
      "Epoch: 090 | Loss: 0.0160 | Val AUC: 0.9456 | Val F1: 0.9213\n",
      "Epoch: 095 | Loss: 0.0761 | Val AUC: 0.9078 | Val F1: 0.9245\n",
      "Epoch: 100 | Loss: 0.0197 | Val AUC: 0.9318 | Val F1: 0.9053\n",
      "Epoch: 105 | Loss: 0.1895 | Val AUC: 0.9090 | Val F1: 0.8730\n",
      "Epoch: 110 | Loss: 0.0879 | Val AUC: 0.9019 | Val F1: 0.9113\n",
      "Epoch: 115 | Loss: 0.1223 | Val AUC: 0.9374 | Val F1: 0.9113\n",
      "Epoch: 120 | Loss: 0.0848 | Val AUC: 0.9476 | Val F1: 0.9030\n",
      "Epoch: 125 | Loss: 0.0573 | Val AUC: 0.9357 | Val F1: 0.8996\n",
      "Epoch: 130 | Loss: 0.1613 | Val AUC: 0.8921 | Val F1: 0.8640\n",
      "Epoch: 135 | Loss: 0.1025 | Val AUC: 0.9249 | Val F1: 0.9009\n",
      "Epoch 00028: reducing learning rate of group 0 to 1.2500e-04.\n",
      "Epoch: 140 | Loss: 0.0086 | Val AUC: 0.9509 | Val F1: 0.9134\n",
      "Epoch: 145 | Loss: 0.0977 | Val AUC: 0.9552 | Val F1: 0.9176\n",
      "Epoch: 150 | Loss: 0.0134 | Val AUC: 0.9381 | Val F1: 0.8919\n",
      "Epoch: 155 | Loss: 0.0125 | Val AUC: 0.9307 | Val F1: 0.9202\n",
      "Epoch: 160 | Loss: 0.0636 | Val AUC: 0.9435 | Val F1: 0.9161\n",
      "Epoch: 165 | Loss: 0.0370 | Val AUC: 0.9565 | Val F1: 0.9124\n",
      "Epoch: 170 | Loss: 0.0598 | Val AUC: 0.9461 | Val F1: 0.9222\n",
      "Epoch: 175 | Loss: 0.1594 | Val AUC: 0.9598 | Val F1: 0.9084\n",
      "Epoch: 180 | Loss: 0.0953 | Val AUC: 0.9223 | Val F1: 0.9113\n",
      "Epoch: 185 | Loss: 0.0772 | Val AUC: 0.9530 | Val F1: 0.9176\n",
      "Epoch: 190 | Loss: 0.1587 | Val AUC: 0.9377 | Val F1: 0.8996\n",
      "Epoch: 195 | Loss: 0.0817 | Val AUC: 0.9477 | Val F1: 0.9091\n",
      "Epoch: 200 | Loss: 0.1535 | Val AUC: 0.9450 | Val F1: 0.9175\n",
      "Epoch: 205 | Loss: 0.0441 | Val AUC: 0.9291 | Val F1: 0.9134\n",
      "Epoch: 210 | Loss: 0.0391 | Val AUC: 0.9504 | Val F1: 0.9021\n",
      "Epoch: 215 | Loss: 0.0479 | Val AUC: 0.9489 | Val F1: 0.9176\n",
      "Epoch: 220 | Loss: 0.0145 | Val AUC: 0.9273 | Val F1: 0.9156\n",
      "Epoch: 225 | Loss: 0.0114 | Val AUC: 0.9558 | Val F1: 0.9161\n",
      "Epoch: 230 | Loss: 0.2564 | Val AUC: 0.9496 | Val F1: 0.8840\n",
      "Epoch 00047: reducing learning rate of group 0 to 6.2500e-05.\n",
      "Epoch: 235 | Loss: 0.0185 | Val AUC: 0.9344 | Val F1: 0.9113\n",
      "Epoch: 240 | Loss: 0.1242 | Val AUC: 0.9360 | Val F1: 0.9009\n",
      "Epoch: 245 | Loss: 0.2137 | Val AUC: 0.9483 | Val F1: 0.9191\n",
      "Epoch: 250 | Loss: 0.0103 | Val AUC: 0.9560 | Val F1: 0.9182\n",
      "Epoch: 255 | Loss: 0.0200 | Val AUC: 0.9276 | Val F1: 0.9166\n",
      "Epoch: 260 | Loss: 0.1124 | Val AUC: 0.9615 | Val F1: 0.9154\n",
      "Epoch: 265 | Loss: 0.0212 | Val AUC: 0.9554 | Val F1: 0.9107\n",
      "Epoch: 270 | Loss: 0.1522 | Val AUC: 0.9507 | Val F1: 0.9165\n",
      "Epoch: 275 | Loss: 0.1650 | Val AUC: 0.9487 | Val F1: 0.8993\n",
      "Epoch: 280 | Loss: 0.1278 | Val AUC: 0.9498 | Val F1: 0.8999\n",
      "Epoch: 285 | Loss: 0.1279 | Val AUC: 0.9446 | Val F1: 0.8831\n",
      "Epoch: 290 | Loss: 0.2065 | Val AUC: 0.9352 | Val F1: 0.8887\n",
      "Epoch: 295 | Loss: 0.0089 | Val AUC: 0.9447 | Val F1: 0.9192\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.9606 | Test AP: 0.8662 | Test F1: 0.9099 | G-mean: 0.8907\n",
      "\n",
      "=== Running experiment for Small model ===\n",
      "Total trainable parameters: 1.79M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7515\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.6315\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.5320\n",
      "Pretrain Epoch: 015, TSNE Loss: 0.3828\n",
      "Pretrain Epoch: 020, TSNE Loss: 0.2131\n",
      "Pretrain Epoch: 025, TSNE Loss: 0.0186\n",
      "Pretrain Epoch: 030, TSNE Loss: -0.1628\n",
      "Pretrain Epoch: 035, TSNE Loss: -0.2943\n",
      "Pretrain Epoch: 040, TSNE Loss: -0.4463\n",
      "Pretrain Epoch: 045, TSNE Loss: -0.6080\n",
      "Pretrain Epoch: 050, TSNE Loss: -0.7485\n",
      "Pretrain Epoch: 055, TSNE Loss: -0.8857\n",
      "Pretrain Epoch: 060, TSNE Loss: -0.9884\n",
      "Pretrain Epoch: 065, TSNE Loss: -1.0917\n",
      "Pretrain Epoch: 070, TSNE Loss: -1.1598\n",
      "Pretrain Epoch: 075, TSNE Loss: -1.2430\n",
      "Pretrain Epoch: 080, TSNE Loss: -1.3043\n",
      "Pretrain Epoch: 085, TSNE Loss: -1.3658\n",
      "Pretrain Epoch: 090, TSNE Loss: -1.4224\n",
      "Pretrain Epoch: 095, TSNE Loss: -1.4604\n",
      "Pretrain Epoch: 100, TSNE Loss: -1.5042\n",
      "Pretrain Epoch: 105, TSNE Loss: -1.5312\n",
      "Pretrain Epoch: 110, TSNE Loss: -1.5562\n",
      "Pretrain Epoch: 115, TSNE Loss: -1.6111\n",
      "Pretrain Epoch: 120, TSNE Loss: -1.6434\n",
      "Pretrain Epoch: 125, TSNE Loss: -1.6619\n",
      "Pretrain Epoch: 130, TSNE Loss: -1.6914\n",
      "Pretrain Epoch: 135, TSNE Loss: -1.6871\n",
      "Pretrain Epoch: 140, TSNE Loss: -1.7257\n",
      "Pretrain Epoch: 145, TSNE Loss: -1.7270\n",
      "Pretrain Epoch: 150, TSNE Loss: -1.7835\n",
      "Pretrain Epoch: 155, TSNE Loss: -1.7791\n",
      "Pretrain Epoch: 160, TSNE Loss: -1.8148\n",
      "Pretrain Epoch: 165, TSNE Loss: -1.8445\n",
      "Pretrain Epoch: 170, TSNE Loss: -1.8480\n",
      "Pretrain Epoch: 175, TSNE Loss: -1.8520\n",
      "Pretrain Epoch: 180, TSNE Loss: -1.8777\n",
      "Pretrain Epoch: 185, TSNE Loss: -1.8699\n",
      "Pretrain Epoch: 190, TSNE Loss: -1.9162\n",
      "Pretrain Epoch: 195, TSNE Loss: -1.9116\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 1775.4353 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 005 | Loss: 25.5887 | Val AUC: 0.5521 | Val F1: 0.4751\n",
      "Epoch: 010 | Loss: 0.5512 | Val AUC: 0.5698 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.7827 | Val AUC: 0.5683 | Val F1: 0.4751\n",
      "Epoch: 020 | Loss: 0.6320 | Val AUC: 0.5790 | Val F1: 0.4751\n",
      "Epoch: 025 | Loss: 0.3302 | Val AUC: 0.5692 | Val F1: 0.4751\n",
      "Epoch: 030 | Loss: 0.3206 | Val AUC: 0.5603 | Val F1: 0.4751\n",
      "Epoch: 035 | Loss: 0.5216 | Val AUC: 0.5747 | Val F1: 0.4751\n",
      "Epoch: 040 | Loss: 0.3566 | Val AUC: 0.5727 | Val F1: 0.4751\n",
      "Epoch: 045 | Loss: 0.3856 | Val AUC: 0.5715 | Val F1: 0.4751\n",
      "Epoch: 050 | Loss: 1.7343 | Val AUC: 0.5645 | Val F1: 0.4751\n",
      "Epoch: 055 | Loss: 0.2672 | Val AUC: 0.5596 | Val F1: 0.4751\n",
      "Epoch: 060 | Loss: 0.4663 | Val AUC: 0.4230 | Val F1: 0.4751\n",
      "Epoch: 065 | Loss: 0.2929 | Val AUC: 0.5714 | Val F1: 0.4751\n",
      "Epoch: 070 | Loss: 0.3030 | Val AUC: 0.5821 | Val F1: 0.4751\n",
      "Epoch: 075 | Loss: 0.3366 | Val AUC: 0.5831 | Val F1: 0.4751\n",
      "Epoch: 080 | Loss: 0.2471 | Val AUC: 0.5832 | Val F1: 0.4751\n",
      "Epoch: 085 | Loss: 0.2367 | Val AUC: 0.5853 | Val F1: 0.4751\n",
      "Epoch: 090 | Loss: 0.3845 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 095 | Loss: 0.4138 | Val AUC: 0.6147 | Val F1: 0.4751\n",
      "Epoch: 100 | Loss: 0.2930 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 105 | Loss: 0.3228 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 110 | Loss: 0.3472 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 115 | Loss: 0.2534 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 120 | Loss: 0.3275 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 125 | Loss: 0.2446 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 130 | Loss: 0.4469 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 135 | Loss: 0.4056 | Val AUC: 0.5000 | Val F1: 0.4751\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch: 140 | Loss: 0.3140 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 145 | Loss: 0.3698 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 150 | Loss: 0.2780 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00031: reducing learning rate of group 0 to 2.5000e-04.\n",
      "Epoch: 155 | Loss: 0.2881 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 160 | Loss: 0.3796 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 165 | Loss: 0.4168 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 170 | Loss: 0.3455 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 175 | Loss: 0.3814 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 180 | Loss: 0.3113 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 185 | Loss: 0.3814 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 190 | Loss: 0.3455 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 195 | Loss: 0.3175 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 200 | Loss: 0.3787 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 205 | Loss: 0.3805 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch 00042: reducing learning rate of group 0 to 1.2500e-04.\n",
      "Epoch: 210 | Loss: 0.2426 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 215 | Loss: 0.3800 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 220 | Loss: 0.3849 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 225 | Loss: 0.2774 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 230 | Loss: 0.3113 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 235 | Loss: 0.2432 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 240 | Loss: 0.4143 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 245 | Loss: 0.3455 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Early stopping at epoch 245\n",
      "\n",
      "=== Final Test Results ===\n",
      "Test AUC: 0.6236 | Test AP: 0.1237 | Test F1: 0.4750 | G-mean: 0.0000\n",
      "\n",
      "=== Running experiment for Medium model ===\n",
      "Total trainable parameters: 8.31M\n",
      "cuda\n",
      "loading data...\n",
      "\n",
      "=== Starting Pretraining ===\n",
      "Pretrain Epoch: 000, TSNE Loss: 0.7269\n",
      "Pretrain Epoch: 005, TSNE Loss: 0.5151\n",
      "Pretrain Epoch: 010, TSNE Loss: 0.2545\n",
      "Pretrain Epoch: 015, TSNE Loss: -0.0586\n",
      "Pretrain Epoch: 020, TSNE Loss: -0.3819\n",
      "Pretrain Epoch: 025, TSNE Loss: -0.6340\n",
      "Pretrain Epoch: 030, TSNE Loss: -0.8563\n",
      "Pretrain Epoch: 035, TSNE Loss: -1.0226\n",
      "Pretrain Epoch: 040, TSNE Loss: -1.1620\n",
      "Pretrain Epoch: 045, TSNE Loss: -1.2707\n",
      "Pretrain Epoch: 050, TSNE Loss: -1.3357\n",
      "Pretrain Epoch: 055, TSNE Loss: -1.4345\n",
      "Pretrain Epoch: 060, TSNE Loss: -1.4951\n",
      "Pretrain Epoch: 065, TSNE Loss: -1.5326\n",
      "Pretrain Epoch: 070, TSNE Loss: -1.6025\n",
      "Pretrain Epoch: 075, TSNE Loss: -1.6136\n",
      "Pretrain Epoch: 080, TSNE Loss: -1.6812\n",
      "Pretrain Epoch: 085, TSNE Loss: -1.7385\n",
      "Pretrain Epoch: 090, TSNE Loss: -1.7667\n",
      "Pretrain Epoch: 095, TSNE Loss: -1.7930\n",
      "Pretrain Epoch: 100, TSNE Loss: -1.8412\n",
      "Pretrain Epoch: 105, TSNE Loss: -1.8662\n",
      "Pretrain Epoch: 110, TSNE Loss: -1.8679\n",
      "Pretrain Epoch: 115, TSNE Loss: -1.9242\n",
      "Pretrain Epoch: 120, TSNE Loss: -1.9279\n",
      "Pretrain Epoch: 125, TSNE Loss: -1.9686\n",
      "Pretrain Epoch: 130, TSNE Loss: -1.9570\n",
      "Pretrain Epoch: 135, TSNE Loss: -1.9914\n",
      "Pretrain Epoch: 140, TSNE Loss: -2.0345\n",
      "Pretrain Epoch: 145, TSNE Loss: -2.0542\n",
      "Pretrain Epoch: 150, TSNE Loss: -2.0870\n",
      "Pretrain Epoch: 155, TSNE Loss: -2.0555\n",
      "Pretrain Epoch: 160, TSNE Loss: -2.1085\n",
      "Pretrain Epoch: 165, TSNE Loss: -2.0839\n",
      "Pretrain Epoch: 170, TSNE Loss: -2.1540\n",
      "Pretrain Epoch: 175, TSNE Loss: -2.1634\n",
      "Pretrain Epoch: 180, TSNE Loss: -2.1728\n",
      "Pretrain Epoch: 185, TSNE Loss: -2.1957\n",
      "Pretrain Epoch: 190, TSNE Loss: -2.2408\n",
      "Pretrain Epoch: 195, TSNE Loss: -2.1994\n",
      "\n",
      "=== Starting Fine-tuning ===\n",
      "Epoch: 000 | Loss: 6980.8254 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 005 | Loss: 8.8352 | Val AUC: 0.5898 | Val F1: 0.5047\n",
      "Epoch: 010 | Loss: 1.1911 | Val AUC: 0.5907 | Val F1: 0.4751\n",
      "Epoch: 015 | Loss: 0.6149 | Val AUC: 0.5901 | Val F1: 0.4751\n",
      "Epoch: 020 | Loss: 0.4289 | Val AUC: 0.5921 | Val F1: 0.4751\n",
      "Epoch: 025 | Loss: 0.3142 | Val AUC: 0.5900 | Val F1: 0.4751\n",
      "Epoch: 030 | Loss: 0.3204 | Val AUC: 0.5910 | Val F1: 0.4751\n",
      "Epoch: 035 | Loss: 0.4838 | Val AUC: 0.5900 | Val F1: 0.4751\n",
      "Epoch: 040 | Loss: 0.4872 | Val AUC: 0.5888 | Val F1: 0.4751\n",
      "Epoch: 045 | Loss: 0.3908 | Val AUC: 0.5915 | Val F1: 0.4751\n",
      "Epoch: 050 | Loss: 0.4190 | Val AUC: 0.6091 | Val F1: 0.4751\n",
      "Epoch: 055 | Loss: 0.2466 | Val AUC: 0.6027 | Val F1: 0.4751\n",
      "Epoch: 060 | Loss: 0.4526 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 065 | Loss: 0.3199 | Val AUC: 0.6087 | Val F1: 0.4751\n",
      "Epoch: 070 | Loss: 0.2716 | Val AUC: 0.5915 | Val F1: 0.4751\n",
      "Epoch: 075 | Loss: 0.4025 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 080 | Loss: 0.2505 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 085 | Loss: 0.2319 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 090 | Loss: 0.3788 | Val AUC: 0.5000 | Val F1: 0.4751\n",
      "Epoch: 095 | Loss: 0.8960 | Val AUC: 0.5000 | Val F1: 0.4751\n"
     ]
    }
   ],
   "source": [
    "# Results of the second experiment run\n",
    "run_scaling_experiments()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3e7001cd",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tsne01",
   "language": "python",
   "name": "tsne01"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
