{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "## 初始化qlib框架"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[15920:MainThread](2023-05-11 19:06:49,479) INFO - qlib.Initialization - [config.py:416] - default_conf: client.\n",
      "[15920:MainThread](2023-05-11 19:06:49,484) INFO - qlib.Initialization - [__init__.py:74] - qlib successfully initialized based on client settings.\n",
      "[15920:MainThread](2023-05-11 19:06:49,486) INFO - qlib.Initialization - [__init__.py:76] - data_path={'__DEFAULT_FREQ': WindowsPath('E:/LYX/QLibCsvData')}\n"
     ]
    },
    {
     "data": {
      "text/plain": "'E:\\\\LYX\\\\QLibCsvData'"
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# --- Imports: stdlib, third-party (pandas/numpy/torch), qlib, project-local ---\n",
    "import os.path\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "import torch\n",
    "from pprint import pprint\n",
    "\n",
    "import qlib\n",
    "from qlib.constant import REG_CN\n",
    "from qlib.data.dataset.handler import DataHandlerLP\n",
    "from qlib.data.dataset import DatasetH\n",
    "from qlib.contrib.data.handler import Alpha158\n",
    "from qlib.contrib.evaluate import backtest_daily\n",
    "from qlib.contrib.evaluate import risk_analysis\n",
    "from qlib.contrib.strategy import TopkDropoutStrategy\n",
    "from qlib.contrib.model.pytorch_tabnet import TabnetModel, FinetuneModel, TabNet\n",
    "from src.EnvironmentVariables import BASE_PATH, MODELS_PATH\n",
    "from src.TrainerByQLib.EmotionAlpha158 import EmotionAlpha158\n",
    "from src.TrainerByQLib.TimeStrategy import TimingStrategyOverTopKDropout\n",
    "\n",
    "# Initialize qlib against the locally converted CSV data provider (CN region).\n",
    "provider_uri = os.path.abspath(os.path.join(BASE_PATH, '..', 'QLibCsvData'))\n",
    "qlib.init(provider_uri=provider_uri, region=REG_CN)\n",
    "# HAS_TRAIN=True skips the fit step in the training cell below (weights are\n",
    "# assumed to already exist on disk).\n",
    "HAS_TRAIN = True\n",
    "# NOTE(review): USE_BASIC_STRATEGY is presumably read further down to choose\n",
    "# between TopkDropoutStrategy and TimingStrategyOverTopKDropout — confirm.\n",
    "USE_BASIC_STRATEGY = False\n",
    "provider_uri"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 获取沪深300成分股的alpha158因子序列"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[15920:MainThread](2023-05-11 19:07:25,669) INFO - qlib.timer - [log.py:128] - Time cost: 36.165s | Loading data Done\n",
      "[15920:MainThread](2023-05-11 19:07:26,014) INFO - qlib.timer - [log.py:128] - Time cost: 0.126s | DropnaLabel Done\n",
      "[15920:MainThread](2023-05-11 19:07:30,992) INFO - qlib.timer - [log.py:128] - Time cost: 4.977s | CSZScoreNorm Done\n",
      "[15920:MainThread](2023-05-11 19:07:31,015) INFO - qlib.timer - [log.py:128] - Time cost: 5.345s | fit & process data Done\n",
      "[15920:MainThread](2023-05-11 19:07:31,017) INFO - qlib.timer - [log.py:128] - Time cost: 41.513s | Init data Done\n"
     ]
    }
   ],
   "source": [
    "def get_stocks_dataset(instruments=\"csi300\",\n",
    "                        start_time=\"2000-01-01\", end_time=\"2023-04-01\",\n",
    "                        fit_start_time=\"2000-01-01\", fit_end_time=\"2018-12-31\"):\n",
    "    \"\"\"Build an Alpha158 DatasetH over the given instrument pool.\n",
    "\n",
    "    All arguments default to the original hard-coded CSI300 configuration,\n",
    "    so existing zero-argument calls behave unchanged.\n",
    "\n",
    "    Returns:\n",
    "        (dataset, model_input_size) — model_input_size is the number of\n",
    "        handler columns minus one (the label column).\n",
    "    \"\"\"\n",
    "    data_handler_config = {\n",
    "        \"start_time\": start_time,\n",
    "        \"end_time\": end_time,\n",
    "        \"fit_start_time\": fit_start_time,\n",
    "        \"fit_end_time\": fit_end_time,\n",
    "        \"instruments\": instruments,\n",
    "    }\n",
    "\n",
    "    h = Alpha158(**data_handler_config)\n",
    "    # \"train\" spans the whole fit window; \"pretrain\"/\"pretrain_validation\"\n",
    "    # are sub-windows (presumably for TabNet self-supervised pretraining —\n",
    "    # the model config below sets pretrain=True); \"test\"/\"valid\" are\n",
    "    # out-of-sample.\n",
    "    segments = {\"pretrain\": (\"2000-01-01\", \"2014-12-31\"),\n",
    "                \"pretrain_validation\": (\"2015-01-01\", \"2016-12-31\"),\n",
    "                \"train\": (\"2000-01-01\", \"2018-12-31\"),\n",
    "                \"test\": (\"2019-1-1\", \"2019-12-31\"),\n",
    "                \"valid\": (\"2020-01-01\", \"2023-3-31\"), }\n",
    "    # One handler column is the label; the rest are model features.\n",
    "    model_input_size = len(h.get_cols()) - 1\n",
    "    ds = DatasetH(h, segments=segments)\n",
    "    return ds, model_input_size\n",
    "\n",
    "\n",
    "stocks_dataset, stocks_model_input_size = get_stocks_dataset()"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 加载带情感维度的 沪深300指数 数据"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[15920:MainThread](2023-05-11 19:07:31,286) INFO - qlib.timer - [log.py:128] - Time cost: 0.200s | Loading data Done\n",
      "[15920:MainThread](2023-05-11 19:07:31,289) INFO - qlib.timer - [log.py:128] - Time cost: 0.002s | DropnaLabel Done\n",
      "[15920:MainThread](2023-05-11 19:07:31,291) INFO - qlib.timer - [log.py:128] - Time cost: 0.003s | fit & process data Done\n",
      "[15920:MainThread](2023-05-11 19:07:31,291) INFO - qlib.timer - [log.py:128] - Time cost: 0.205s | Init data Done\n"
     ]
    }
   ],
   "source": [
    "def get_emotion_dataset():\n",
    "    \"\"\"Build the EmotionAlpha158 DatasetH for the sentiment-augmented index.\n",
    "\n",
    "    Returns:\n",
    "        (dataset, model_input_size) — model_input_size is the number of\n",
    "        handler columns minus one (the label column).\n",
    "    \"\"\"\n",
    "    data_handler_config = {\n",
    "        \"start_time\": \"2000-01-01\",\n",
    "        \"end_time\": \"2023-04-01\",\n",
    "        \"fit_start_time\": \"2000-01-01\",\n",
    "        \"fit_end_time\": \"2018-12-31\",\n",
    "        \"instruments\": \"emotionindex\",\n",
    "    }\n",
    "\n",
    "    h = EmotionAlpha158(**data_handler_config)\n",
    "    segments = {\"pretrain\": (\"2014-12-01\", \"2016-12-31\"),\n",
    "                \"pretrain_validation\": (\"2017-1-1\", \"2018-1-1\"),\n",
    "                \"train\": (\"2014-01-01\", \"2018-12-31\"),\n",
    "                \"test\": (\"2019-1-1\", \"2019-12-31\"),\n",
    "                \"valid\": (\"2020-01-01\", \"2023-3-31\"), }\n",
    "    # One handler column is the label; the rest feed the model.\n",
    "    # (Two leftover debugging statements whose results were discarded — a\n",
    "    # duplicate h.get_cols() call and an unused h.fetch(...) — were removed.)\n",
    "    model_input_size = len(h.get_cols()) - 1\n",
    "    ds = DatasetH(h, segments=segments)\n",
    "    return ds, model_input_size\n",
    "\n",
    "\n",
    "emotion_dataset, emotion_model_input_size = get_emotion_dataset()"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 构建Tabnet模型"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "### 需要迁移学习的模型"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [],
   "source": [
    "# The tabnet.weight checkpoint was produced by a TabNet with 158 input\n",
    "# features (the stock Alpha158 factor set), so the rebuilt wrapper below\n",
    "# must use this fixed input dimension regardless of the target model's.\n",
    "PRETRAINED_INP_DIM = 158\n",
    "\n",
    "\n",
    "def load_matching_parameters(tar_model, pretrained_dict):\n",
    "    \"\"\"Load into ``tar_model`` every entry of ``pretrained_dict`` whose key\n",
    "    exists in the target's state_dict with an identical tensor shape.\n",
    "\n",
    "    Shape-mismatched tensors (layers sized for a different feature count)\n",
    "    are silently skipped, which is what makes the transfer partial.\n",
    "    \"\"\"\n",
    "    model_dict = tar_model.state_dict()\n",
    "    matching_parameters = {k: v for k, v in pretrained_dict.items() if\n",
    "                           k in model_dict and model_dict[k].size() == v.size()}\n",
    "\n",
    "    # Merge the matching parameters into the target's state_dict ...\n",
    "    model_dict.update(matching_parameters)\n",
    "\n",
    "    # ... and load the merged state_dict back into the model.\n",
    "    tar_model.load_state_dict(model_dict)\n",
    "\n",
    "\n",
    "def load_model2fit(model_input_size):\n",
    "    \"\"\"Build a TabnetModel for ``model_input_size`` features and seed it with\n",
    "    all shape-compatible weights from the 158-feature pretrained checkpoint.\n",
    "    \"\"\"\n",
    "    model_config = {\n",
    "        \"d_feat\": model_input_size,\n",
    "        \"batch_size\": 120,\n",
    "        \"lr\": 0.01,\n",
    "        \"GPU\": 0,\n",
    "        \"pretrain\": True,\n",
    "        \"out_dim\": 64,\n",
    "        \"final_out_dim\": 1,\n",
    "        \"vbs\": 2048,\n",
    "        \"relax\": 1.3,\n",
    "    }\n",
    "    model_path = os.path.join(MODELS_PATH, 'tabnet.weight')\n",
    "    model = TabnetModel(**model_config)\n",
    "    # Rebuild the wrapper the checkpoint was saved from so state_dict keys\n",
    "    # line up with the saved ones.\n",
    "    pretrained = FinetuneModel(model_config[\"out_dim\"],\n",
    "                               model_config[\"final_out_dim\"],\n",
    "                               TabNet(inp_dim=PRETRAINED_INP_DIM,\n",
    "                                      out_dim=model_config[\"out_dim\"],\n",
    "                                      vbs=model_config[\"vbs\"],\n",
    "                                      relax=model_config[\"relax\"]))\n",
    "    # map_location=\"cpu\" keeps the load working on hosts without the GPU the\n",
    "    # checkpoint was saved on; strict=False tolerates missing/extra keys.\n",
    "    pretrained.load_state_dict(torch.load(model_path, map_location=\"cpu\"), strict=False)\n",
    "    tabnet_weight = pretrained.model.state_dict()\n",
    "    # Transfer only shape-compatible pretrained weights into the new model.\n",
    "    load_matching_parameters(model.tabnet_model, tabnet_weight)\n",
    "    return model\n"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 训练模型"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "outputs": [],
   "source": [
    "# Only fit when HAS_TRAIN is False (HAS_TRAIN=True presumably means the\n",
    "# emotion_tabnet.weight file was already produced by a previous run — confirm).\n",
    "# The model is seeded from the 158-feature stock checkpoint via load_model2fit,\n",
    "# then fine-tuned on the emotion dataset and saved under MODELS_PATH.\n",
    "if not HAS_TRAIN:\n",
    "    pretrained_model2fit = load_model2fit(emotion_model_input_size)\n",
    "    pretrained_model2fit.fit(dataset=emotion_dataset, save_path=os.path.join(MODELS_PATH, 'emotion_tabnet.weight'))"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "### 直接加载模型"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[15920:MainThread](2023-05-11 19:07:31,340) INFO - qlib.TabNet - [pytorch_tabnet.py:78] - TabNet:\n",
      "batch_size : 120\n",
      "virtual bs : 2048\n",
      "device : cuda:0\n",
      "pretrain: True\n",
      "[15920:MainThread](2023-05-11 19:07:31,368) INFO - qlib.TabNet - [pytorch_tabnet.py:91] - model:\n",
      "TabNet(\n",
      "  (shared): ModuleList(\n",
      "    (0): Linear(in_features=158, out_features=256, bias=True)\n",
      "    (1): Linear(in_features=128, out_features=256, bias=True)\n",
      "  )\n",
      "  (first_step): FeatureTransformer(\n",
      "    (shared): ModuleList(\n",
      "      (0): GLU(\n",
      "        (fc): Linear(in_features=158, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (1): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (independ): ModuleList(\n",
      "      (0): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (1): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "  )\n",
      "  (steps): ModuleList(\n",
      "    (0): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=158, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(158, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (1): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=158, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(158, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (2): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=158, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(158, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (3): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=158, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(158, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "  )\n",
      "  (fc): Linear(in_features=64, out_features=64, bias=True)\n",
      "  (bn): BatchNorm1d(158, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      ")\n",
      "TabNet_Decoder(\n",
      "  (shared): ModuleList(\n",
      "    (0): Linear(in_features=64, out_features=316, bias=True)\n",
      "    (1): Linear(in_features=158, out_features=316, bias=True)\n",
      "  )\n",
      "  (steps): ModuleList(\n",
      "    (0): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=158, out_features=158, bias=True)\n",
      "    )\n",
      "    (1): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=158, out_features=158, bias=True)\n",
      "    )\n",
      "    (2): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=158, out_features=158, bias=True)\n",
      "    )\n",
      "    (3): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=158, out_features=158, bias=True)\n",
      "    )\n",
      "    (4): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=158, out_features=316, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(316, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=158, out_features=158, bias=True)\n",
      "    )\n",
      "  )\n",
      ")\n",
      "[15920:MainThread](2023-05-11 19:07:31,370) INFO - qlib.TabNet - [pytorch_tabnet.py:92] - model size: 1.1182 MB\n",
      "[15920:MainThread](2023-05-11 19:07:31,417) INFO - qlib.TabNet - [pytorch_tabnet.py:78] - TabNet:\n",
      "batch_size : 120\n",
      "virtual bs : 2048\n",
      "device : cuda:0\n",
      "pretrain: True\n",
      "[15920:MainThread](2023-05-11 19:07:31,446) INFO - qlib.TabNet - [pytorch_tabnet.py:91] - model:\n",
      "TabNet(\n",
      "  (shared): ModuleList(\n",
      "    (0): Linear(in_features=173, out_features=256, bias=True)\n",
      "    (1): Linear(in_features=128, out_features=256, bias=True)\n",
      "  )\n",
      "  (first_step): FeatureTransformer(\n",
      "    (shared): ModuleList(\n",
      "      (0): GLU(\n",
      "        (fc): Linear(in_features=173, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (1): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (independ): ModuleList(\n",
      "      (0): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (1): GLU(\n",
      "        (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "  )\n",
      "  (steps): ModuleList(\n",
      "    (0): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=173, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(173, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (1): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=173, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(173, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (2): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=173, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(173, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "    (3): DecisionStep(\n",
      "      (atten_tran): AttentionTransformer(\n",
      "        (fc): Linear(in_features=64, out_features=173, bias=True)\n",
      "        (bn): GBN(\n",
      "          (bn): BatchNorm1d(173, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "        )\n",
      "      )\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=128, out_features=256, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(256, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "    )\n",
      "  )\n",
      "  (fc): Linear(in_features=64, out_features=64, bias=True)\n",
      "  (bn): BatchNorm1d(173, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      ")\n",
      "TabNet_Decoder(\n",
      "  (shared): ModuleList(\n",
      "    (0): Linear(in_features=64, out_features=346, bias=True)\n",
      "    (1): Linear(in_features=173, out_features=346, bias=True)\n",
      "  )\n",
      "  (steps): ModuleList(\n",
      "    (0): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=173, out_features=173, bias=True)\n",
      "    )\n",
      "    (1): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=173, out_features=173, bias=True)\n",
      "    )\n",
      "    (2): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=173, out_features=173, bias=True)\n",
      "    )\n",
      "    (3): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=173, out_features=173, bias=True)\n",
      "    )\n",
      "    (4): DecoderStep(\n",
      "      (fea_tran): FeatureTransformer(\n",
      "        (shared): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=64, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "        (independ): ModuleList(\n",
      "          (0): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "          (1): GLU(\n",
      "            (fc): Linear(in_features=173, out_features=346, bias=True)\n",
      "            (bn): GBN(\n",
      "              (bn): BatchNorm1d(346, eps=1e-05, momentum=0.01, affine=True, track_running_stats=True)\n",
      "            )\n",
      "          )\n",
      "        )\n",
      "      )\n",
      "      (fc): Linear(in_features=173, out_features=173, bias=True)\n",
      "    )\n",
      "  )\n",
      ")\n",
      "[15920:MainThread](2023-05-11 19:07:31,448) INFO - qlib.TabNet - [pytorch_tabnet.py:92] - model size: 1.2569 MB\n"
     ]
    }
   ],
   "source": [
    "def load_trained_model(model_name='tabnet4singleStock.weight', model_input_size=stocks_model_input_size):\n",
    "    \"\"\"Rebuild the TabNet architecture and load trained weights from MODELS_PATH.\n",
    "\n",
    "    :param model_name: weight file name under MODELS_PATH\n",
    "    :param model_input_size: feature dimension (d_feat) the network was trained with\n",
    "    :return: a TabnetModel marked as fitted, ready for predict()\n",
    "    \"\"\"\n",
    "    model_config = {\n",
    "        \"d_feat\": model_input_size,\n",
    "        \"batch_size\": 120,\n",
    "        \"lr\": 0.01,\n",
    "        \"GPU\": 0,\n",
    "        \"pretrain\": True,\n",
    "        \"out_dim\": 64,\n",
    "        \"final_out_dim\": 1,\n",
    "        \"vbs\": 2048,\n",
    "        \"relax\": 1.3,\n",
    "    }\n",
    "\n",
    "    model = TabnetModel(**model_config)\n",
    "    # Fall back to CPU when CUDA is unavailable instead of crashing on .cuda().\n",
    "    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "    # Swap the default network for a finetune head over a freshly built TabNet backbone.\n",
    "    model.tabnet_model = FinetuneModel(64, 1, TabNet(inp_dim=model_input_size,\n",
    "                                                     out_dim=model_config[\"out_dim\"],\n",
    "                                                     vbs=model_config[\"vbs\"],\n",
    "                                                     relax=model_config[\"relax\"])).to(device)\n",
    "    # map_location lets GPU-saved checkpoints load on CPU-only machines.\n",
    "    model.tabnet_model.load_state_dict(torch.load(os.path.join(MODELS_PATH, model_name), map_location=device))\n",
    "    model.fitted = True\n",
    "    return model\n",
    "\n",
    "\n",
    "stock_model = load_trained_model()\n",
    "emotion_model = load_trained_model('tabnet4dataWithEmotionIndex.weight', emotion_model_input_size)"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 回测"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "outputs": [],
   "source": [
    "# Score stock selection and the market-timing (emotion) signal on the validation segment.\n",
    "stock_pred_score = stock_model.predict(stocks_dataset, segment=\"valid\")\n",
    "time_pred_score = emotion_model.predict(emotion_dataset, segment=\"valid\")"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[15920:MainThread](2023-05-11 19:20:12,086) WARNING - qlib.BaseExecutor - [executor.py:121] - `common_infra` is not set for <qlib.backtest.executor.SimulatorExecutor object at 0x0000023E641C3A60>\n",
      "[15920:MainThread](2023-05-11 19:20:12,091) INFO - qlib.backtest caller - [__init__.py:93] - Create new exchange\n",
      "[15920:MainThread](2023-05-11 19:20:28,178) WARNING - qlib.online operator - [exchange.py:219] - $close field data contains nan.\n",
      "[15920:MainThread](2023-05-11 19:20:28,181) WARNING - qlib.online operator - [exchange.py:219] - $close field data contains nan.\n",
      "[15920:MainThread](2023-05-11 19:20:28,188) WARNING - qlib.online operator - [exchange.py:226] - factor.day.bin file not exists or factor contains `nan`. Order using adjusted_price.\n",
      "[15920:MainThread](2023-05-11 19:20:28,189) WARNING - qlib.online operator - [exchange.py:228] - trade unit 100 is not supported in adjusted_price mode.\n"
     ]
    },
    {
     "data": {
      "text/plain": "backtest loop:   0%|          | 0/787 [00:00<?, ?it/s]",
      "application/vnd.jupyter.widget-view+json": {
       "version_major": 2,
       "version_minor": 0,
       "model_id": "eb9f61a64bc046a398cee6d7745a1108"
      }
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\python\\lib\\site-packages\\qlib\\utils\\index_data.py:482: RuntimeWarning:\n",
      "\n",
      "Mean of empty slice\n",
      "\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "                                                  risk\n",
      "excess_return_without_cost mean               0.000612\n",
      "                           std                0.005306\n",
      "                           annualized_return  0.145566\n",
      "                           information_ratio  1.778457\n",
      "                           max_drawdown      -0.098224\n",
      "                           sharp_ratio        1.411931\n",
      "                           beta               0.824554\n",
      "                           alpha             -0.000493\n",
      "                           calmar_ratio       1.176552\n",
      "excess_return_with_cost    mean               0.000410\n",
      "                           std                0.005304\n",
      "                           annualized_return  0.097472\n",
      "                           information_ratio  1.191294\n",
      "                           max_drawdown      -0.130647\n",
      "                           sharp_ratio        0.824635\n",
      "                           beta               0.824742\n",
      "                           alpha             -0.000326\n",
      "                           calmar_ratio       0.516441\n",
      "win rate with cost: 0.5426\n",
      "win rate with out cost: 0.5565\n"
     ]
    }
   ],
   "source": [
    "# Baseline TopKDropout strategy vs. the timing-augmented variant\n",
    "from scipy.stats import linregress\n",
    "\n",
    "RISK_FREE_PROFIT = 0.03  # assumed annual risk-free rate\n",
    "TRADING_DAYS_PER_YEAR = 238  # approximate A-share trading days per year\n",
    "\n",
    "\n",
    "def get_topk_dropout_stra():\n",
    "    \"\"\"Build the plain TopkDropoutStrategy driven by the stock score signal.\"\"\"\n",
    "    strategy_config = {\n",
    "        \"topk\": 50,\n",
    "        \"n_drop\": 5,\n",
    "        # pred_score, pd.Series\n",
    "        \"signal\": stock_pred_score,\n",
    "    }\n",
    "    return TopkDropoutStrategy(**strategy_config)\n",
    "\n",
    "\n",
    "def get_timing_stra_over_topk():\n",
    "    \"\"\"Build the TopKDropout strategy augmented with the market-timing (emotion) signal.\"\"\"\n",
    "    strategy_config = {\n",
    "        \"change_freq\": 5,\n",
    "        \"time_signal\": time_pred_score,\n",
    "        \"topk\": 50,\n",
    "        \"n_drop\": 5,\n",
    "        \"signal\": stock_pred_score,\n",
    "    }\n",
    "    return TimingStrategyOverTopKDropout(**strategy_config)\n",
    "\n",
    "\n",
    "def financial_metrics(o_rep, r, b):\n",
    "    \"\"\"Append Sharpe, beta/alpha and Calmar ratios to a qlib risk_analysis report.\n",
    "\n",
    "    :param o_rep: DataFrame returned by risk_analysis (single 'risk' column)\n",
    "    :param r: daily strategy return series\n",
    "    :param b: daily benchmark return series\n",
    "    :return: o_rep with the extra metrics concatenated as new rows\n",
    "    \"\"\"\n",
    "    ans = dict()\n",
    "    ana_rep = o_rep['risk']\n",
    "    mean = ana_rep['mean']\n",
    "    std = ana_rep['std']\n",
    "    annualized_return = ana_rep['annualized_return']\n",
    "    max_drawdown = ana_rep['max_drawdown']\n",
    "    ans['sharp_ratio'] = (mean - RISK_FREE_PROFIT / TRADING_DAYS_PER_YEAR) / std * np.sqrt(TRADING_DAYS_PER_YEAR)\n",
    "    # CAPM beta/alpha: regress strategy returns on benchmark returns (x=benchmark, y=strategy).\n",
    "    # The previous linregress(r, b) regressed the benchmark on the strategy, inverting the slope.\n",
    "    ans['beta'], ans['alpha'], _, _, _ = linregress(b, r)\n",
    "    ans['calmar_ratio'] = -(annualized_return - RISK_FREE_PROFIT) / max_drawdown\n",
    "    return pd.concat([o_rep, pd.Series(ans).to_frame(\"risk\")])\n",
    "\n",
    "\n",
    "def backtest_and_show(strategy_obj):\n",
    "    \"\"\"Run the daily backtest, print risk metrics and win rates, and export the report graph.\n",
    "\n",
    "    :param strategy_obj: a qlib strategy instance\n",
    "    :return: the per-day report DataFrame from backtest_daily\n",
    "    \"\"\"\n",
    "    report_normal, positions_normal = backtest_daily(\n",
    "        start_time=\"2020-01-01\", end_time=\"2023-3-31\", strategy=strategy_obj\n",
    "    )\n",
    "    analysis = dict()\n",
    "\n",
    "    # default frequency will be daily (i.e. \"day\")\n",
    "    excess = report_normal[\"return\"] - report_normal[\"bench\"]\n",
    "    excess_net = excess - report_normal[\"cost\"]\n",
    "    analysis[\"excess_return_without_cost\"] = financial_metrics(risk_analysis(excess),\n",
    "                                                               report_normal[\"return\"],\n",
    "                                                               report_normal[\"bench\"])\n",
    "    analysis[\"excess_return_with_cost\"] = financial_metrics(risk_analysis(excess_net),\n",
    "                                                            report_normal[\"return\"] - report_normal[\"cost\"],\n",
    "                                                            report_normal[\"bench\"])\n",
    "    # Mean of a boolean series is the win fraction; unlike value_counts(...)[True]\n",
    "    # it cannot raise KeyError when there are no winning days.\n",
    "    win_rate_without_cost = (excess > 0).mean()\n",
    "    win_rate_with_cost = (excess_net > 0).mean()\n",
    "    analysis_df = pd.concat(analysis)  # type: pd.DataFrame\n",
    "\n",
    "    pprint(analysis_df)\n",
    "    print(\"win rate with cost: %.4f\" % win_rate_with_cost)\n",
    "    print(\"win rate with out cost: %.4f\" % win_rate_without_cost)\n",
    "    import plotly\n",
    "    from qlib.contrib.report.analysis_position.report import report_graph\n",
    "\n",
    "    fig = report_graph(report_normal, show_notebook=False)\n",
    "    plotly.offline.plot(fig[0], filename=\"resultxxx.html\")\n",
    "    return report_normal\n",
    "\n",
    "\n",
    "if USE_BASIC_STRATEGY:\n",
    "    s_obj = get_topk_dropout_stra()\n",
    "else:\n",
    "    s_obj = get_timing_stra_over_topk()\n",
    "report = backtest_and_show(s_obj)"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "outputs": [],
   "source": [
    "\n",
    "# Toggle to the baseline TopKDropout strategy, then re-run the backtest cell above to compare.\n",
    "# NOTE(review): this relies on out-of-order execution (hidden kernel state); a fresh\n",
    "# Restart-and-Run-All will not reproduce the comparison automatically.\n",
    "USE_BASIC_STRATEGY = True\n"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "                       risk\n",
      "mean               0.000067\n",
      "std                0.012741\n",
      "annualized_return  0.015984\n",
      "information_ratio  0.081317\n",
      "max_drawdown      -0.473379\n",
      "sharp_ratio       -0.071309\n",
      "beta               1.000000\n",
      "alpha              0.000000\n",
      "calmar_ratio      -0.029609\n"
     ]
    }
   ],
   "source": [
    "# Sanity check: the benchmark measured against itself should yield beta=1 and alpha=0.\n",
    "pprint(financial_metrics(risk_analysis(report[\"bench\"]),\n",
    "                                            report[\"bench\"],\n",
    "                                            report[\"bench\"]))"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [],
   "metadata": {
    "collapsed": false
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 1
}
