{
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.5-final"
  },
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3",
   "language": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\"\"\"Train ResNet-50 on the mushroom dataset with MindSpore (GPU, graph mode).\"\"\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# import things\n",
    "import os\n",
    "import argparse\n",
    "import ast\n",
    "import yaml"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# import mindspore\n",
    "import mindspore.nn as nn\n",
    "import mindspore.common.initializer as weight_init\n",
    "from mindspore import context, Tensor\n",
    "from mindspore.nn.optim.momentum import Momentum\n",
    "from mindspore.train.model import Model\n",
    "from mindspore.context import ParallelMode\n",
    "from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor, TimeMonitor\n",
    "from mindspore.nn.loss import SoftmaxCrossEntropyWithLogits\n",
    "from mindspore.train.loss_scale_manager import FixedLossScaleManager\n",
    "from mindspore.train.serialization import load_checkpoint, load_param_into_net\n",
    "from mindspore.communication.management import init, get_rank, get_group_size\n",
    "from mindspore.common import set_seed"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# import src files\n",
    "# from src.lr_generator import get_lr, warmup_cosine_annealing_lr\n",
    "# from src.CrossEntropySmooth import CrossEntropySmooth\n",
    "from src.dataset import create_dataset2 as create_dataset\n",
    "from src.resnet import resnet50 as resnet\n",
    "from src.lr_generator import get_lr\n",
    "from src.CrossEntropySmooth import CrossEntropySmooth"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# obtain input parameters\n",
    "# argparse.ArgumentParser().parse_args() cannot run inside a notebook (the\n",
    "# kernel injects its own argv), so build the namespace directly instead.\n",
    "# A later cell reads args_opt.net -- with the parser commented out it would\n",
    "# raise NameError on a fresh Restart & Run All.\n",
    "args_opt = argparse.Namespace(net='resnet50', dataset_path='./mushroom-dataset/train')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# produce deterministic result\n",
    "# fixed seed for MindSpore's global RNGs so repeated runs are reproducible\n",
    "set_seed(1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# load training hyperparameters from the project YAML config\n",
    "yaml_config_path = './src/config.yaml'\n",
    "with open(yaml_config_path, mode='r', encoding='utf-8') as yaml_config_file:\n",
    "    yaml_config = yaml.safe_load(yaml_config_file)\n",
    "\n",
    "# select the 'config2' profile -- presumably paired with create_dataset2; TODO confirm\n",
    "config = yaml_config['config2']"
   ]
  },
  {
   "source": [
    "if __name__ == '__main__':\n",
    "    # check point dir\n",
    "    # NOTE(review): this guard is always True in a notebook, and the cells\n",
    "    # below keep its 4-space indent (IPython dedents them on execution);\n",
    "    # the split 'main' block is misleading -- consider flattening the indent\n",
    "    ckpt_save_dir = config['save_checkpoint_path']\n"
   ],
   "cell_type": "code",
   "metadata": {},
   "execution_count": null,
   "outputs": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # configure execution env\n",
    "    # GRAPH_MODE is for static graph, PYNATIVE_MODE is for dynamic graph\n",
    "    # we use GPU; save_graphs=False skips dumping compiled IR graphs to disk\n",
    "    context.set_context(mode=context.GRAPH_MODE, device_target='GPU', save_graphs=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # create dataset\n",
    "    # NOTE(review): dataset path is hard-coded here; consider moving it into\n",
    "    # the YAML config next to batch_size\n",
    "    dataset = create_dataset(dataset_path=\"./mushroom-dataset/train\", do_train=True, repeat_num=1, batch_size=config['batch_size'])\n",
    "    # batches per epoch; reused below for the lr schedule and checkpoint cadence\n",
    "    step_size = dataset.get_dataset_size()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # define net\n",
    "    # resnet is resnet50 (aliased in the imports cell); head width = class_num\n",
    "    net = resnet(class_num=config['class_num'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # weight initialization: Xavier-uniform for conv layers,\n",
    "    # truncated normal for dense (fully connected) layers\n",
    "    for _, layer in net.cells_and_names():\n",
    "        if isinstance(layer, nn.Conv2d):\n",
    "            init_method = weight_init.XavierUniform()\n",
    "        elif isinstance(layer, nn.Dense):\n",
    "            init_method = weight_init.TruncatedNormal()\n",
    "        else:\n",
    "            # leave every other cell type (batchnorm, pooling, ...) untouched\n",
    "            continue\n",
    "        layer.weight.set_data(weight_init.initializer(init_method,\n",
    "                                                      layer.weight.shape,\n",
    "                                                      layer.weight.dtype))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # init lr\n",
    "    # build the per-step learning-rate schedule (warmup then decay,\n",
    "    # shape controlled by lr_decay_mode)\n",
    "    lr = get_lr(lr_init=config[\"lr_init\"], lr_end=config[\"lr_end\"], lr_max=config[\"lr_max\"],\n",
    "                    warmup_epochs=config[\"warmup_epochs\"], total_epochs=config[\"epoch_size\"], \n",
    "                    steps_per_epoch=step_size, lr_decay_mode=config[\"lr_decay_mode\"])\n",
    "    \n",
    "    # convert to tensor so the optimizer consumes the full schedule\n",
    "    lr = Tensor(lr)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # define optimizer: split params so batchnorm beta/gamma and biases\n",
    "    # are exempt from weight decay\n",
    "    decayed_params = []\n",
    "    no_decayed_params = []\n",
    "\n",
    "    for param in net.trainable_params():\n",
    "        skip_decay = any(key in param.name for key in ('beta', 'gamma', 'bias'))\n",
    "        (no_decayed_params if skip_decay else decayed_params).append(param)\n",
    "\n",
    "    # order_params keeps the optimizer's parameter ordering identical\n",
    "    # to the network's own ordering\n",
    "    group_params = [{'params': decayed_params, 'weight_decay': config[\"weight_decay\"]},\n",
    "                    {'params': no_decayed_params},\n",
    "                    {'order_params': net.trainable_params()}]\n",
    "    # use momentum, try adam next time\n",
    "    opt = Momentum(group_params, lr, config[\"momentum\"], loss_scale=config[\"loss_scale\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # define loss\n",
    "    if not config[\"use_label_smooth\"]:\n",
    "        config[\"label_smooth_factor\"] = 0.0\n",
    "\n",
    "    # cross entropy with optional label smoothing\n",
    "    loss = CrossEntropySmooth(sparse=True, reduction=\"mean\",\n",
    "                                smooth_factor=config[\"label_smooth_factor\"], num_classes=config[\"class_num\"])\n",
    "\n",
    "    # NOTE: do NOT recreate the optimizer here -- doing so silently discarded\n",
    "    # the grouped-parameter Momentum (with weight-decay exemptions) built in\n",
    "    # the previous cell; we keep that `opt` instead\n",
    "    loss_scale = FixedLossScaleManager(config[\"loss_scale\"], drop_overflow_update=False)\n",
    "\n",
    "    # Mixed precision model (O2: cast to fp16, keep batchnorm in fp32)\n",
    "    model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics={'acc'},\n",
    "                    amp_level=\"O2\", keep_batchnorm_fp32=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # define callbacks\n",
    "    # monitor time during training\n",
    "    time_cb = TimeMonitor(data_size=step_size)\n",
    "\n",
    "    # monitor loss\n",
    "    loss_cb = LossMonitor()\n",
    "\n",
    "    cb = [time_cb, loss_cb]\n",
    "    \n",
    "    if config[\"save_checkpoint\"]:\n",
    "        # checkpoint cadence is configured in epochs; convert to steps\n",
    "        config_ck = CheckpointConfig(save_checkpoint_steps=config[\"save_checkpoint_epochs\"] * step_size,\n",
    "                                     keep_checkpoint_max=config[\"keep_checkpoint_max\"])\n",
    "        ckpt_cb = ModelCheckpoint(prefix=\"resnet\", directory=ckpt_save_dir, config=config_ck)\n",
    "        cb += [ckpt_cb]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "    # train model\n",
    "    # the se-resnet50 branch read args_opt.net, but the argparse cell is\n",
    "    # commented out, so args_opt may never be defined (NameError on a fresh\n",
    "    # run); guard the lookup so the plain resnet50 path always works\n",
    "    if 'args_opt' in globals() and getattr(args_opt, 'net', None) == \"se-resnet50\":\n",
    "        config[\"epoch_size\"] = config[\"train_epoch_size\"]\n",
    "\n",
    "    # sink when GPU or static graph mode\n",
    "    model.train(config[\"epoch_size\"] - config[\"pretrain_epoch_size\"], dataset, callbacks=cb,\n",
    "                dataset_sink_mode=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# debug: inspect the label-smoothing flag's type\n",
    "# the committed run raised KeyError('use_label_smooth'); the stale error\n",
    "# traceback has been cleared, and .get() avoids crashing when the key is\n",
    "# absent from the selected config profile (returns NoneType instead)\n",
    "type(config.get(\"use_label_smooth\"))"
   ]
  }
 ]
}