{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2024-03-03 17:48:55.486510: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n",
      "2024-03-03 17:48:55.526873: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n",
      "2024-03-03 17:48:55.526900: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n",
      "2024-03-03 17:48:55.528208: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n",
      "2024-03-03 17:48:55.535370: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
      "To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n",
      "2024-03-03 17:48:56.371986: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n"
     ]
    }
   ],
   "source": [
    "import torch\n",
    "from torch.optim import AdamW\n",
    "from avalanche.evaluation.metrics.accuracy import Accuracy\n",
    "from tqdm import tqdm\n",
    "from timm.models import create_model\n",
    "from timm.scheduler.cosine_lr import CosineLRScheduler\n",
    "from timm.loss import LabelSmoothingCrossEntropy\n",
    "from argparse import ArgumentParser\n",
    "from utils import (\n",
    "    get_config,\n",
    "    set_seed,\n",
    "    mkdirss,\n",
    "    merge_config,\n",
    "    partial_save,\n",
    "    # partial_load,\n",
    ")\n",
    "import psutil\n",
    "from logger import create_logger\n",
    "from src.criterion import MoECtn\n",
    "\n",
    "\n",
     "# Substring keys selecting which parameters `partial_save` checkpoints;\n",
     "# presumably only the MoV adapter weights, not the frozen backbone --\n",
     "# TODO confirm against utils.partial_save.\n",
     "SAVE_KEYS = [\"mov\"]\n",
    "\n",
    "\n",
     "def train(\n",
     "    config, model, criterion, dl, opt, scheduler, logger, epoch\n",
     "):\n",
     "    \"\"\"Train `model` for `epoch` epochs on dataloader `dl`.\n",
     "\n",
     "    Args:\n",
     "        config: run-settings namespace; `config.best_acc` is read and\n",
     "            updated in place with the best test accuracy seen so far.\n",
     "        model: network to train; moved to GPU here, back to CPU on return.\n",
     "        criterion: loss object (MoECtn). Called as `criterion(out, y)` and\n",
     "            then `criterion.backward()` -- presumably it caches the loss\n",
     "            internally; TODO confirm against src/criterion.py.\n",
     "        dl: training dataloader yielding (inputs, labels) batches.\n",
     "        opt: optimizer whose `step()` updates the trainable parameters.\n",
     "        scheduler: optional LR scheduler stepped once per epoch with the\n",
     "            epoch index (timm-style `step(ep)`), or None.\n",
     "        logger: logger used for memory and accuracy reports.\n",
     "        epoch: total number of epochs to run.\n",
     "\n",
     "    Returns:\n",
     "        The trained model, moved back to the CPU.\n",
     "\n",
     "    NOTE(review): relies on module-level names `test_dl` (periodic\n",
     "    evaluation), `SAVE_KEYS` and `partial_save`; `test_dl` must be\n",
     "    defined elsewhere in the notebook before this is called.\n",
     "    \"\"\"\n",
     "\n",
     "    model.train()\n",
     "    model = model.cuda()\n",
     "\n",
     "    for ep in tqdm(range(epoch)):\n",
     "        # Redundant with the calls above, but harmless: re-asserts train\n",
     "        # mode on GPU each epoch (e.g. after an eval pass switched modes).\n",
     "        model.train()\n",
     "        model = model.cuda()\n",
     "        # pbar = tqdm(dl)\n",
     "        for i, batch in enumerate(dl):\n",
     "            # torch.cuda.empty_cache()\n",
     "            x, y = batch[0].cuda(), batch[1].cuda()\n",
     "            out = model(x)\n",
     "\n",
     "            # loss = F.cross_entropy(out, y)\n",
     "            # The criterion computes the loss; backprop is then driven\n",
     "            # through the criterion object rather than a loss tensor.\n",
     "            criterion(out, y)\n",
     "            opt.zero_grad()\n",
     "            criterion.backward()\n",
     "            opt.step()\n",
     "\n",
     "        if scheduler is not None:\n",
     "            scheduler.step(ep)\n",
     "\n",
     "        # Report host RAM usage and peak GPU allocation (both in MB).\n",
     "        ram_used = psutil.virtual_memory().used / (1024.0 * 1024.0)\n",
     "        memory_used = torch.cuda.max_memory_allocated() / (\n",
     "            1024.0 * 1024.0\n",
     "        )\n",
     "        logger.info(\n",
     "            \"RAM used: \"\n",
     "            + str(ram_used)\n",
     "            + \" memory: \"\n",
     "            + str(memory_used)\n",
     "            + \"MB\"\n",
     "        )\n",
     "\n",
     "        # Evaluate every 10th epoch (ep = 9, 19, ...); checkpoint the\n",
     "        # SAVE_KEYS parameter subset whenever accuracy improves.\n",
     "        if ep % 10 == 9:\n",
     "            # memory_used = torch.cuda.max_memory_allocated() / (1024.0 * 1024.0)\n",
     "            acc = test(model, test_dl)\n",
     "            if acc > config.best_acc:\n",
     "                config.best_acc = acc\n",
     "                partial_save(\n",
     "                    SAVE_KEYS,\n",
     "                    model,\n",
     "                    config.model_type,\n",
     "                    config.task,\n",
     "                    config.name,\n",
     "                    acc,\n",
     "                    ep,\n",
     "                )\n",
     "            logger.info(\n",
     "                str(ep)\n",
     "                + \" \"\n",
     "                + str(acc)\n",
     "                + \" memory: \"\n",
     "                + str(memory_used)\n",
     "                + \"MB\"\n",
     "            )\n",
     "    model = model.cpu()\n",
     "    return model\n",
    "\n",
    "\n",
    "@torch.no_grad()\n",
    "def test(model, dl):\n",
    "    model.eval()\n",
    "    acc = Accuracy()\n",
    "    # pbar = tqdm(dl)\n",
    "    model = model.cuda()\n",
    "    for batch in dl:  # pbar:\n",
    "        torch.cuda.empty_cache()\n",
    "        x, y = batch[0].cuda(), batch[1].cuda()\n",
    "        out = model(x).data\n",
    "        acc.update(out.argmax(dim=1).view(-1), y)\n",
    "        # acc.update(out.argmax(dim=1).view(-1), y, 0)\n",
    "\n",
    "    return acc.result()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO - 03/03/24 17:48:57 - 0:00:00 - Namespace(seed=42, lr=0.001, wd=0.0001, eval='True', dpr=0.1, model='vit_base_patch16_224_in21k_mov', model_checkpoint='./released_models/ViT-B_16.npz', model_type='vit_mov', task='vtab', dataset='cifar', tuning_mode='mov', num_experts=20, loss_coef=0.1)\n",
      "INFO - 03/03/24 17:48:57 - 0:00:00 - namespace(name='cifar_mov_001', class_num=100, train_aug=False, labelsmoothing=0.1, batch_size=64, epochs=100, warmup_epochs=10, seed=42, lr=0.001, wd=0.0001, eval='True', dpr=0.1, model='vit_base_patch16_224_in21k_mov', model_checkpoint='./released_models/ViT-B_16.npz', model_type='vit_mov', task='vtab', dataset='cifar', tuning_mode='mov', num_experts=20, loss_coef=0.1)\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "namespace(name='cifar_mov_001', class_num=100, train_aug=False, labelsmoothing=0.1, batch_size=64, epochs=100, warmup_epochs=10, seed=42, lr=0.001, wd=0.0001, eval='True', dpr=0.1, model='vit_base_patch16_224_in21k_mov', model_checkpoint='./released_models/ViT-B_16.npz', model_type='vit_mov', task='vtab', dataset='cifar', tuning_mode='mov', num_experts=20, loss_coef=0.1)\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "INFO - 03/03/24 17:48:59 - 0:00:02 - VisionTransformer_mov(\n",
      "                                       (patch_embed): PatchEmbed(\n",
      "                                         (proj): Conv2d(3, 768, kernel_size=(16, 16), stride=(16, 16))\n",
      "                                         (norm): Identity()\n",
      "                                       )\n",
      "                                       (pos_drop): Dropout(p=0.0, inplace=False)\n",
      "                                       (blocks): Sequential(\n",
      "                                         (0): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): Identity()\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): Identity()\n",
      "                                         )\n",
      "                                         (1): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.009)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.009)\n",
      "                                         )\n",
      "                                         (2): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.018)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.018)\n",
      "                                         )\n",
      "                                         (3): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.027)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.027)\n",
      "                                         )\n",
      "                                         (4): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.036)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.036)\n",
      "                                         )\n",
      "                                         (5): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.045)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.045)\n",
      "                                         )\n",
      "                                         (6): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.055)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.055)\n",
      "                                         )\n",
      "                                         (7): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.064)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.064)\n",
      "                                         )\n",
      "                                         (8): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.073)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.073)\n",
      "                                         )\n",
      "                                         (9): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.082)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.082)\n",
      "                                         )\n",
      "                                         (10): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.091)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.091)\n",
      "                                         )\n",
      "                                         (11): Block(\n",
      "                                           (norm1): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (attn): Attention(\n",
      "                                             (qkv): Linear(in_features=768, out_features=2304, bias=True)\n",
      "                                             (attn_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (proj): Linear(in_features=768, out_features=768, bias=True)\n",
      "                                             (proj_drop): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoVAttention(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls1): Identity()\n",
      "                                           (drop_path1): DropPath(drop_prob=0.100)\n",
      "                                           (norm2): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                           (mlp): Mlp(\n",
      "                                             (fc1): Linear(in_features=768, out_features=3072, bias=True)\n",
      "                                             (act): GELU(approximate='none')\n",
      "                                             (drop1): Dropout(p=0.0, inplace=False)\n",
      "                                             (fc2): Linear(in_features=3072, out_features=768, bias=True)\n",
      "                                             (drop2): Dropout(p=0.0, inplace=False)\n",
      "                                             (mov): MoV(\n",
      "                                               (router): SoftRouter(\n",
      "                                                 (router_weights): RouterWeights()\n",
      "                                               )\n",
      "                                             )\n",
      "                                           )\n",
      "                                           (ls2): Identity()\n",
      "                                           (drop_path2): DropPath(drop_prob=0.100)\n",
      "                                         )\n",
      "                                       )\n",
      "                                       (norm): LayerNorm((768,), eps=1e-06, elementwise_affine=True)\n",
      "                                       (fc_norm): Identity()\n",
      "                                       (head): Linear(in_features=768, out_features=100, bias=True)\n",
      "                                     )\n"
     ]
    }
   ],
   "source": [
    "import sys\n",
    "# NOTE(review): presumably this import registers the *_mov variants with\n",
    "# timm's model registry so create_model() below can resolve them — confirm.\n",
    "from src.models import vision_transformer_mov\n",
    "\n",
    "\n",
    "if __name__ == \"__main__\":\n",
    "    # CLI defaults for the MoV (Mixture-of-Vectors) fine-tuning run.\n",
    "    parser = ArgumentParser()\n",
    "    parser.add_argument(\"--seed\", type=int, default=42)\n",
    "    parser.add_argument(\"--lr\", type=float, default=1e-3)\n",
    "    parser.add_argument(\"--wd\", type=float, default=1e-4)\n",
    "    parser.add_argument(\"--eval\", type=str, default=\"True\")\n",
    "    parser.add_argument(\"--dpr\", type=float, default=0.1)\n",
    "    parser.add_argument(\n",
    "        \"--model\", type=str, default=\"vit_base_patch16_224_in21k_mov\"\n",
    "    )\n",
    "    parser.add_argument(\n",
    "        \"--model_ckp\",\n",
    "        type=str,\n",
    "        default=\"./released_models/ViT-B_16.npz\",\n",
    "    )\n",
    "    parser.add_argument(\"--model_type\", type=str, default=\"vit_mov\")\n",
    "    parser.add_argument(\"--task\", type=str, default=\"vtab\")\n",
    "    parser.add_argument(\"--dataset\", type=str, default=\"cifar\")\n",
    "    parser.add_argument(\"--tuning_mode\", type=str, default=\"mov\")\n",
    "    parser.add_argument(\"--num_experts\", type=int, default=None)\n",
    "    parser.add_argument(\"--loss_coef\", type=float, default=None)\n",
    "\n",
    "    # Fake the command line so parse_args() works inside the notebook kernel\n",
    "    # (the kernel's real sys.argv carries Jupyter's own flags).\n",
    "    sys.argv = [\n",
    "        \"review.ipynb\",\n",
    "        \"--num_experts\",\n",
    "        \"20\",\n",
    "        \"--loss_coef\",\n",
    "        \"0.1\",\n",
    "    ]\n",
    "    args = parser.parse_args()\n",
    "    # NOTE(review): get_config / merge_config / set_seed / mkdirss /\n",
    "    # create_logger are not defined anywhere in this notebook — presumably\n",
    "    # imported by a cell not shown; verify before a fresh-kernel run.\n",
    "    config = get_config(\"model_mov\", args.task, args.dataset)\n",
    "    config = merge_config(args, config)\n",
    "    print(config)\n",
    "\n",
    "    set_seed(config.seed)\n",
    "\n",
    "    num_experts = config.num_experts\n",
    "\n",
    "    # Experiment output dir: ./output/<model_type>/<task>/<name>_dim_<E>\n",
    "    exp_base_path = \"./output/%s/%s/%s\" % (\n",
    "        config.model_type,\n",
    "        config.task,\n",
    "        config.name + \"_dim_%d\" % (num_experts),\n",
    "    )\n",
    "    mkdirss(exp_base_path)\n",
    "    logger = create_logger(\n",
    "        output_dir=exp_base_path, name=\"Mixture_of_Vectors\"\n",
    "    )\n",
    "\n",
    "    logger.info(args)\n",
    "    logger.info(config)\n",
    "\n",
    "    # prepare training data\n",
    "    if config.eval == \"True\":\n",
    "        evalflag = True\n",
    "    else:\n",
    "        evalflag = False\n",
    "\n",
    "    if config.task == \"vtab\":\n",
    "        from vtab import get_data\n",
    "\n",
    "        basedir = \"./data/vtab-1k\"\n",
    "    elif config.task == \"fgvc\":\n",
    "        # from fgvc import *\n",
    "        raise NotImplementedError\n",
    "\n",
    "    # if \"train_aug\" in config.keys():\n",
    "    if hasattr(config, \"train_aug\"):\n",
    "        train_aug = config.train_aug\n",
    "    else:\n",
    "        train_aug = False\n",
    "\n",
    "    # Swin: build unpretrained, then load a torch state dict manually with\n",
    "    # strict=False so MoV/adapter params may stay unmatched. Other models:\n",
    "    # let timm load the checkpoint file via checkpoint_path.\n",
    "    if \"swin\" in config.model:\n",
    "        model = create_model(\n",
    "            config.model,\n",
    "            pretrained=False,\n",
    "            drop_path_rate=config.dpr,\n",
    "            tuning_mode=config.tuning_mode,\n",
    "            num_experts=num_experts,\n",
    "        )\n",
    "        model.load_state_dict(\n",
    "            torch.load(config.model_ckp)[\"model\"], False\n",
    "        )  # not include adapt module\n",
    "    else:\n",
    "        model = create_model(\n",
    "            config.model,\n",
    "            checkpoint_path=config.model_ckp,\n",
    "            drop_path_rate=config.dpr,\n",
    "            tuning_mode=config.tuning_mode,\n",
    "            num_experts=num_experts,\n",
    "        )\n",
    "\n",
    "    # Replace the pre-trained head with a fresh classifier for this dataset.\n",
    "    model.reset_classifier(config.class_num)\n",
    "\n",
    "    logger.info(str(model))\n",
    "\n",
    "    config.best_acc = 0\n",
    "    config.task = config.task  # NOTE(review): no-op self-assignment — likely a leftover; confirm intent or remove"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "blocks.0.attn.mov.scaling\n",
      "blocks.0.attn.mov.router.router_weights.kernel\n",
      "blocks.0.attn.mov.router.router_weights.bias\n",
      "blocks.0.mlp.mov.scaling\n",
      "blocks.0.mlp.mov.router.router_weights.kernel\n",
      "blocks.0.mlp.mov.router.router_weights.bias\n",
      "blocks.1.attn.mov.scaling\n",
      "blocks.1.attn.mov.router.router_weights.kernel\n",
      "blocks.1.attn.mov.router.router_weights.bias\n",
      "blocks.1.mlp.mov.scaling\n",
      "blocks.1.mlp.mov.router.router_weights.kernel\n",
      "blocks.1.mlp.mov.router.router_weights.bias\n",
      "blocks.2.attn.mov.scaling\n",
      "blocks.2.attn.mov.router.router_weights.kernel\n",
      "blocks.2.attn.mov.router.router_weights.bias\n",
      "blocks.2.mlp.mov.scaling\n",
      "blocks.2.mlp.mov.router.router_weights.kernel\n",
      "blocks.2.mlp.mov.router.router_weights.bias\n",
      "blocks.3.attn.mov.scaling\n",
      "blocks.3.attn.mov.router.router_weights.kernel\n",
      "blocks.3.attn.mov.router.router_weights.bias\n",
      "blocks.3.mlp.mov.scaling\n",
      "blocks.3.mlp.mov.router.router_weights.kernel\n",
      "blocks.3.mlp.mov.router.router_weights.bias\n",
      "blocks.4.attn.mov.scaling\n",
      "blocks.4.attn.mov.router.router_weights.kernel\n",
      "blocks.4.attn.mov.router.router_weights.bias\n",
      "blocks.4.mlp.mov.scaling\n",
      "blocks.4.mlp.mov.router.router_weights.kernel\n",
      "blocks.4.mlp.mov.router.router_weights.bias\n",
      "blocks.5.attn.mov.scaling\n",
      "blocks.5.attn.mov.router.router_weights.kernel\n",
      "blocks.5.attn.mov.router.router_weights.bias\n",
      "blocks.5.mlp.mov.scaling\n",
      "blocks.5.mlp.mov.router.router_weights.kernel\n",
      "blocks.5.mlp.mov.router.router_weights.bias\n",
      "blocks.6.attn.mov.scaling\n",
      "blocks.6.attn.mov.router.router_weights.kernel\n",
      "blocks.6.attn.mov.router.router_weights.bias\n",
      "blocks.6.mlp.mov.scaling\n",
      "blocks.6.mlp.mov.router.router_weights.kernel\n",
      "blocks.6.mlp.mov.router.router_weights.bias\n",
      "blocks.7.attn.mov.scaling\n",
      "blocks.7.attn.mov.router.router_weights.kernel\n",
      "blocks.7.attn.mov.router.router_weights.bias\n",
      "blocks.7.mlp.mov.scaling\n",
      "blocks.7.mlp.mov.router.router_weights.kernel\n",
      "blocks.7.mlp.mov.router.router_weights.bias\n",
      "blocks.8.attn.mov.scaling\n",
      "blocks.8.attn.mov.router.router_weights.kernel\n",
      "blocks.8.attn.mov.router.router_weights.bias\n",
      "blocks.8.mlp.mov.scaling\n",
      "blocks.8.mlp.mov.router.router_weights.kernel\n",
      "blocks.8.mlp.mov.router.router_weights.bias\n",
      "blocks.9.attn.mov.scaling\n",
      "blocks.9.attn.mov.router.router_weights.kernel\n",
      "blocks.9.attn.mov.router.router_weights.bias\n",
      "blocks.9.mlp.mov.scaling\n",
      "blocks.9.mlp.mov.router.router_weights.kernel\n",
      "blocks.9.mlp.mov.router.router_weights.bias\n",
      "blocks.10.attn.mov.scaling\n",
      "blocks.10.attn.mov.router.router_weights.kernel\n",
      "blocks.10.attn.mov.router.router_weights.bias\n",
      "blocks.10.mlp.mov.scaling\n",
      "blocks.10.mlp.mov.router.router_weights.kernel\n",
      "blocks.10.mlp.mov.router.router_weights.bias\n",
      "blocks.11.attn.mov.scaling\n",
      "blocks.11.attn.mov.router.router_weights.kernel\n",
      "blocks.11.attn.mov.router.router_weights.bias\n",
      "blocks.11.mlp.mov.scaling\n",
      "blocks.11.mlp.mov.router.router_weights.kernel\n",
      "blocks.11.mlp.mov.router.router_weights.bias\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Processing item 10: 100%|██████████| 10/10 [00:05<00:00,  1.99it/s]\n"
     ]
    }
   ],
   "source": [
    "# Substrings identifying the trainable PEFT parameters worth saving.\n",
    "# NOTE(review): 'sdfasdf' looks like leftover keyboard scratch — confirm\n",
    "# and remove; it matches no parameter name in the printed list.\n",
    "save_keys = ['mov', 'adapt', 'sdfasdf']\n",
    "for k, v in model.named_parameters():\n",
    "    # if any save key appears in the parameter name, print it\n",
    "    if any([key in k for key in save_keys]):\n",
    "        print(k)\n",
    "from tqdm import tqdm\n",
    "import time\n",
    "\n",
    "# Initialize tqdm with a total number of iterations\n",
    "progress_bar = tqdm(total=10)\n",
    "\n",
    "# Iterate over a range and update the description dynamically\n",
    "for i in range(10):\n",
    "    # Update the description\n",
    "    progress_bar.set_description(f\"Processing item {i+1}\")\n",
    "\n",
    "    # Perform the task or computation\n",
    "    time.sleep(0.5)\n",
    "\n",
    "    # Update the progress bar\n",
    "    progress_bar.update(1)\n",
    "\n",
    "# Close the progress bar\n",
    "progress_bar.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "list indices must be integers or slices, not tuple",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[36], line 5\u001b[0m\n\u001b[1;32m      3\u001b[0m c \u001b[38;5;241m=\u001b[39m [a,b]\n\u001b[1;32m      4\u001b[0m c\n\u001b[0;32m----> 5\u001b[0m \u001b[43mc\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m2\u001b[39;49m\u001b[43m]\u001b[49m\n",
      "\u001b[0;31mTypeError\u001b[0m: list indices must be integers or slices, not tuple"
     ]
    },
    {
     "ename": "",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n",
      "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n",
      "\u001b[1;31mClick <a href='https://aka.ms/vscodeJupyterKernelCrash'>here</a> for more info. \n",
      "\u001b[1;31mView Jupyter <a href='command:jupyter.viewOutput'>log</a> for further details."
     ]
    }
   ],
   "source": [
    "a = [3,4,5]\n",
    "b = [6,7,8]\n",
    "c = [a,b]\n",
    "c\n",
    "# BUG FIX: c[0, 2] is invalid for plain Python lists — tuple indices only\n",
    "# work on numpy/torch arrays — and raised TypeError (see recorded output).\n",
    "# Chain the subscripts instead to get element 2 of the first inner list.\n",
    "c[0][2]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Hook for computing the routing losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "import torch\n",
    "import torch.nn as nn\n",
    "from typing import Optional\n",
    "\n",
    "\n",
    "class MoECtn(nn.Module):\n",
    "    \"\"\"Unified loss container for MoV and MoVi models.\n",
    "\n",
    "    Router z-losses are collected via ``compute_zloss`` — registered as a\n",
    "    forward hook on each router module — and task losses via ``forward``.\n",
    "\n",
    "    Methods:\n",
    "        backward: backpropagate all the losses.\n",
    "    \"\"\"\n",
    "\n",
    "    def __init__(self, loss_coef: float = 0.01):\n",
    "        super().__init__()\n",
    "        # Weight applied to each per-router z-loss term.\n",
    "        self.loss_coef = loss_coef\n",
    "        self.routing_losses = []\n",
    "        self.additional_losses = []\n",
    "\n",
    "        # NOTE(review): \"addtional\" is a typo for \"additional\"; kept as-is\n",
    "        # because callers outside this cell may reference the attribute.\n",
    "        self.addtional_lossfun = []\n",
    "\n",
    "    def _reset_losses(self):\n",
    "        # Clear accumulated losses so the next training step starts fresh.\n",
    "        self.routing_losses = []\n",
    "        self.additional_losses = []\n",
    "\n",
    "    def add(self, lossfun):\n",
    "        # Register an extra task-loss callable, e.g. nn.CrossEntropyLoss().\n",
    "        self.addtional_lossfun.append(lossfun)\n",
    "\n",
    "    def forward(self, logits, targets):\n",
    "        # Evaluate every registered task loss and buffer the results; the\n",
    "        # combined value is read back via the properties below.\n",
    "        for lossfun in self.addtional_lossfun:\n",
    "            self.additional_losses.append(lossfun(logits, targets))\n",
    "\n",
    "    def compute_zloss(\n",
    "        self,\n",
    "        module,\n",
    "        module_in,\n",
    "        router_logits: torch.Tensor,\n",
    "    ):\n",
    "        \"\"\"Forward hook (module, input, output) that buffers a router z-loss.\n",
    "\n",
    "        z-loss = mean over batch*length of logsumexp(router_logits, -1)^2,\n",
    "        scaled by ``loss_coef``. Assumes router_logits is at least 2-D with\n",
    "        leading (batch, length) dims — TODO confirm for every router.\n",
    "        \"\"\"\n",
    "        del module, module_in\n",
    "        batch_size, length, *_ = router_logits.shape\n",
    "        log_z = torch.logsumexp(router_logits, dim=-1)\n",
    "        z_loss = log_z**2\n",
    "\n",
    "        self.routing_losses.append(\n",
    "            self.loss_coef\n",
    "            * torch.sum(z_loss, dtype=torch.float32)\n",
    "            / (batch_size * length)\n",
    "        )\n",
    "\n",
    "    def backward(self, *args):\n",
    "        # Sum caller-supplied losses with the mean buffered routing loss,\n",
    "        # backprop once, then clear the buffers.\n",
    "        # NOTE(review): torch.stack raises on an empty list, so at least one\n",
    "        # router hook must have fired before calling backward() — confirm.\n",
    "        loss_total = 0\n",
    "        for argloss in args:\n",
    "            assert isinstance(\n",
    "                argloss, torch.Tensor\n",
    "            ), \"loss must be a tensor\"\n",
    "            assert argloss.requires_grad, \"loss must require grad\"\n",
    "            loss_total += argloss\n",
    "        loss_total += torch.stack(self.routing_losses).mean()\n",
    "        loss_total.backward()\n",
    "        self._reset_losses()\n",
    "\n",
    "    @property\n",
    "    def value(self) -> torch.Tensor:\n",
    "        # Combined routing + task loss. Falls back to tensor(0) — an integer\n",
    "        # tensor with no grad — when a buffer is empty.\n",
    "        routing_loss = (\n",
    "            torch.stack(self.routing_losses).mean()\n",
    "            if self.routing_losses\n",
    "            else torch.tensor(0)\n",
    "        )\n",
    "        additional_loss = (\n",
    "            torch.stack(self.additional_losses).mean()\n",
    "            if self.additional_losses\n",
    "            else torch.tensor(0)\n",
    "        )\n",
    "        return routing_loss + additional_loss\n",
    "\n",
    "    @property\n",
    "    def route_value(self) -> torch.Tensor:\n",
    "        # Mean buffered routing loss only (0 if no hook has fired).\n",
    "        routing_loss = (\n",
    "            torch.stack(self.routing_losses).mean()\n",
    "            if self.routing_losses\n",
    "            else torch.tensor(0)\n",
    "        )\n",
    "        return routing_loss\n",
    "\n",
    "    @property\n",
    "    def additional_value(self) -> torch.Tensor:\n",
    "        # Mean buffered task loss only (0 if forward() has not run).\n",
    "        additional_loss = (\n",
    "            torch.stack(self.additional_losses).mean()\n",
    "            if self.additional_losses\n",
    "            else torch.tensor(0)\n",
    "        )\n",
    "        return additional_loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[-0.1629, -0.0256, -0.2655,  ...,  0.0036, -0.1221, -0.1223],\n",
       "        [-0.1060,  0.1382, -0.1597,  ...,  0.0454, -0.0688, -0.0785],\n",
       "        [-0.0707,  0.0151, -0.2619,  ...,  0.0317, -0.1046, -0.0990],\n",
       "        ...,\n",
       "        [-0.1695, -0.0110, -0.1485,  ..., -0.0061, -0.0780, -0.0938],\n",
       "        [-0.2303, -0.0599, -0.1348,  ..., -0.0015, -0.0964, -0.0407],\n",
       "        [-0.1001, -0.0908,  0.0379,  ...,  0.0694, -0.0960,  0.0680]],\n",
       "       grad_fn=<AddmmBackward0>)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Dummy batch (32 RGB images, 224x224) used below to trigger the hooks.\n",
    "inputs = torch.randn(32, 3, 224, 224)\n",
    "# model(inputs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n",
      "torch.Size([32, 197, 12, 20])\n",
      "torch.Size([32, 197, 20])\n"
     ]
    }
   ],
   "source": [
    "criterion = MoECtn()\n",
    "criterion.add(torch.nn.CrossEntropyLoss())\n",
    "# Attach the z-loss hook to every module whose qualified name ends in\n",
    "# 'router'; each forward pass then buffers one z-loss per router.\n",
    "for k, v in model.named_modules():\n",
    "    if k.endswith('router'):\n",
    "        v.register_forward_hook(criterion.compute_zloss)\n",
    "output = model(inputs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[]\n",
      "tensor(0)\n",
      "[]\n",
      "tensor(0)\n",
      "tensor(0)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[]"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "print(criterion.routing_losses)\n",
    "print(criterion.route_value)\n",
    "print(criterion.additional_losses)\n",
    "print(criterion.additional_value)\n",
    "print(criterion.value)\n",
    "# NOTE(review): torch.stack raises RuntimeError on an empty list, unlike\n",
    "# the guarded `.additional_value` property — the printed `[]` above\n",
    "# suggests the buffer is empty here; confirm before relying on this line.\n",
    "torch.stack(criterion.additional_losses).mean()\n",
    "# criterion.backward()\n",
    "criterion.additional_losses"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Parameter containing:\n",
       "tensor([[[ 0.0600,  0.1386,  0.0930,  0.2161,  0.0629, -0.0305,  0.1305,\n",
       "           0.0819, -0.1156,  0.0621,  0.2224, -0.0816, -0.0471,  0.0911,\n",
       "           0.0996, -0.0054,  0.0022,  0.0841, -0.1796, -0.0428,  0.0719,\n",
       "          -0.0735, -0.1720,  0.1237],\n",
       "         [-0.0959, -0.0976, -0.0285, -0.0955, -0.2049,  0.1933, -0.2066,\n",
       "          -0.2064,  0.0271,  0.1926,  0.1965, -0.1390, -0.0492,  0.0938,\n",
       "           0.1864, -0.1558, -0.0626,  0.1586, -0.0245,  0.0096,  0.2045,\n",
       "          -0.1962,  0.0441, -0.0022],\n",
       "         [ 0.2099, -0.1427,  0.0771,  0.1411,  0.0379,  0.0939, -0.0301,\n",
       "           0.0279, -0.1059, -0.0473,  0.1917, -0.2034, -0.0165, -0.2126,\n",
       "          -0.0253,  0.2146,  0.1835, -0.2117, -0.0096, -0.1936,  0.1729,\n",
       "          -0.0215, -0.0625, -0.0268],\n",
       "         [-0.0781, -0.1959,  0.1167, -0.2035, -0.1310,  0.1188,  0.1397,\n",
       "          -0.0500, -0.0824, -0.1835,  0.0905, -0.1998, -0.1121, -0.1391,\n",
       "           0.0096, -0.1077,  0.0308, -0.0406,  0.0252, -0.1332,  0.0263,\n",
       "          -0.2150,  0.1036,  0.1354],\n",
       "         [ 0.1213,  0.1835, -0.1725,  0.1850,  0.0416, -0.1427,  0.1663,\n",
       "          -0.1097,  0.1063, -0.0441, -0.0061,  0.1686,  0.1435, -0.1097,\n",
       "           0.1419, -0.1566,  0.1978, -0.0449, -0.1275, -0.0422, -0.1833,\n",
       "          -0.0271, -0.1822,  0.2106]]], requires_grad=True)"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import torch\n",
    "from torch.nn import init\n",
    "from torch import nn\n",
    "\n",
    "# Toy MoLoRA setup: per-expert LoRA A/B factor matrices.\n",
    "inp = torch.randn(64,10,34)\n",
    "num_experts = 1\n",
    "hidden_dim = 34\n",
    "output_dim = 24\n",
    "rank = 5\n",
    "dtype = torch.float32\n",
    "\n",
    "# NOTE(review): conventional LoRA initializes B to zeros so the adapter\n",
    "# starts as a no-op; kaiming init for B here may be deliberate — confirm.\n",
    "lora_init_A=nn.init.kaiming_uniform_\n",
    "lora_init_B=nn.init.kaiming_uniform_\n",
    "molora_a = nn.Parameter(\n",
    "    torch.empty((num_experts, hidden_dim, rank), dtype=dtype)\n",
    ")\n",
    "molora_b = nn.Parameter(\n",
    "    # `output_dim or hidden_dim` falls back to a square projection when\n",
    "    # output_dim is None/0.\n",
    "    torch.empty(\n",
    "        (num_experts, rank, output_dim or hidden_dim),\n",
    "        dtype=dtype,\n",
    "    )\n",
    ")\n",
    "lora_init_A(molora_a)\n",
    "lora_init_B(molora_b)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Unpack the (batch, seq, dim) shape of the dummy input from above.\n",
    "b, s, d = inp.shape\n",
    "x = inp.clone()\n",
    "ly = nn.Sequential(\n",
    "    nn.Linear(d, 64),\n",
    "    nn.ReLU(),\n",
    "    nn.Linear(64, 3),\n",
    ")\n",
    "# BUG FIX: the original reassigned `b` (the batch size) to the scalar\n",
    "# loss, silently clobbering it; use a distinct name for the loss.\n",
    "loss = torch.sum(ly(x))**2\n",
    "loss.backward()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([[ -323.5693,   -60.3996,   413.1722,  ...,   186.8324,   771.3909,\n",
       "          -461.6092],\n",
       "        [ -554.7523,  -702.2232,   -58.4136,  ..., -1198.1208,    70.9352,\n",
       "          -332.8121],\n",
       "        [-1853.2244,  -272.7122,  -843.0927,  ...,   246.4923,   636.1647,\n",
       "          1342.4110],\n",
       "        ...,\n",
       "        [ -277.8502,   -97.1855,   -97.4259,  ...,   537.3202,  -172.1196,\n",
       "           533.0807],\n",
       "        [  278.2059,  -151.3567,   529.2067,  ...,  -388.9011,  -644.4919,\n",
       "          -208.3191],\n",
       "        [  106.1750,   615.1868,   378.5650,  ...,   471.0874,  -409.7205,\n",
       "           376.1175]])"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "ly[0].weight.grad"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "tensor([ 56., 112., 168.])"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import torch\n",
    "\n",
    "# Create a tensor\n",
    "x = torch.tensor([1.0, 2.0, 3.0], requires_grad=True)\n",
    "\n",
    "# Perform some operations\n",
    "y = x ** 2\n",
    "z = torch.sum(y)**2\n",
    "\n",
    "# Perform backpropagation\n",
    "z.backward()\n",
    "\n",
    "# dz/dx_i = 2*sum(y)*2*x_i = 4*14*x_i -> [56., 112., 168.]\n",
    "x.grad\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<PowBackward0 at 0x7fceb65717c0>"
      ]
     },
     "execution_count": 38,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# grad_fn records the op that produced z (PowBackward0 from the **2)\n",
    "z.grad_fn"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "# clone() stays on x's autograd graph, so this backward() accumulates\n",
    "# additional gradient into x.grad on top of the earlier z.backward().\n",
    "inp = x.clone()\n",
    "# Perform some operations\n",
    "o = inp ** 2\n",
    "lz = torch.sum(o)**2\n",
    "\n",
    "# Perform backpropagation\n",
    "lz.backward()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "-----------\n",
      "Loss report\n",
      "-----------\n"
     ]
    }
   ],
   "source": [
    "prefix = 'Loss report'\n",
    "\n",
    "# Add a dashed border line above and below the heading\n",
    "border = '-' * len(prefix)\n",
    "formatted_string = f\"\\n{border}\\n{prefix}\\n{border}\"\n",
    "\n",
    "# Print the formatted string\n",
    "print(formatted_string)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "This is the parent class method: 1\n",
      "This is the child class method: 2\n"
     ]
    }
   ],
   "source": [
    "# Minimal super() demo: the parent prints then increments self.c, so the\n",
    "# child sees the updated value.\n",
    "class ParentClass:\n",
    "    def __init__(self):\n",
    "        self.c = 1\n",
    "\n",
    "    def some_method(self):\n",
    "        print(f\"This is the parent class method: {self.c}\")\n",
    "        self.c += 1\n",
    "\n",
    "\n",
    "class ChildClass(ParentClass):\n",
    "    def some_method(self):\n",
    "        super().some_method()  # call the parent class method first\n",
    "        print(f\"This is the child class method: {self.c}\")\n",
    "\n",
    "\n",
    "child = ChildClass()\n",
    "child.some_method()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'202403051614_76.11_train'"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from datetime import datetime\n",
    "# Timestamped best-checkpoint name, e.g. '202403051614_76.11_train'.\n",
    "cur_time = datetime.now().strftime(\"%Y%m%d%H%M\")\n",
    "bp_name = f\"{cur_time}_{76.11:.2f}_train\"\n",
    "bp_name"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "name 'torch' is not defined\n"
     ]
    }
   ],
   "source": [
    "# Try to resume training state from the best checkpoint, if one exists.\n",
    "# NOTE(review): `os` is never imported in this notebook, and the recorded\n",
    "# output shows NameError('torch') — this cell assumes imports/variables\n",
    "# from cells run in a previous session; confirm before a fresh-kernel run.\n",
    "try:\n",
    "    save_dict = torch.load(\n",
    "        os.path.join(exp_base_path, \"ckp_best.pt\")\n",
    "    )\n",
    "    model_st = save_dict[\"trainable\"]\n",
    "    opt_st = save_dict[\"opt_st\"]\n",
    "    scheduler_st = save_dict[\"scheduler_st\"]\n",
    "    acc = save_dict[\"acc\"]\n",
    "    ep = save_dict[\"ep\"]\n",
    "    # resume info\n",
    "    print(f\"Resume from {exp_base_path}, acc: {acc}, ep: {ep}\")\n",
    "except Exception as e:\n",
    "    # Broad catch keeps the notebook running; the error is stashed in\n",
    "    # `errs` for inspection in the next cell.\n",
    "    errs = e\n",
    "    print(e)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "True\n"
     ]
    }
   ],
   "source": [
    "# `type(errs)` on a non-final line is discarded — only the last\n",
    "# expression of a cell is displayed.\n",
    "type(errs)\n",
    "print(isinstance(errs, Exception))\n",
    "# if isinstance(errs, Exception):\n",
    "#     print(\"a\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "torch.Tensor"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from torch import nn\n",
    "\n",
    "la = nn.Linear(64, 1000)\n",
    "# .data of a Parameter is a plain torch.Tensor (see recorded output)\n",
    "type(la.weight.data)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "py39",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
