{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:04.857890Z",
     "iopub.status.busy": "2021-05-14T02:03:04.857363Z",
     "iopub.status.idle": "2021-05-14T02:03:05.246854Z",
     "shell.execute_reply": "2021-05-14T02:03:05.245867Z",
     "shell.execute_reply.started": "2021-05-14T02:03:04.857842Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Setup: load libraries, configure inline SVG plotting, and read the test-set CSV.\n",
     "import pandas as pd\n",
     "from PIL import Image\n",
     "import codecs\n",
     "import os, re\n",
     "\n",
     "import matplotlib.pyplot as plt\n",
     "from IPython.display import set_matplotlib_formats\n",
     "%matplotlib inline\n",
     "set_matplotlib_formats('svg') # render figures as SVG\n",
     "\n",
     "df = pd.read_csv('测试 第 1 轮/test1-utf8.csv')\n",
     "df['filename'] = 'input/训练集/image/' + df['filename'] # rewrite to local image path"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 数据样例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-13T23:47:00.040169Z",
     "iopub.status.busy": "2021-05-13T23:47:00.039624Z",
     "iopub.status.idle": "2021-05-13T23:47:00.057270Z",
     "shell.execute_reply": "2021-05-13T23:47:00.056533Z",
     "shell.execute_reply.started": "2021-05-13T23:47:00.040123Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>index</th>\n",
       "      <th>question_id</th>\n",
       "      <th>filename</th>\n",
       "      <th>question_text</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>628</th>\n",
       "      <td>629</td>\n",
       "      <td>Q00629</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>这是什么报？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>629</th>\n",
       "      <td>630</td>\n",
       "      <td>Q00630</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>中国外交部在3月18日凌晨宣布了什么？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>630</th>\n",
       "      <td>631</td>\n",
       "      <td>Q00631</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>2018年12月美方要求了哪些？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>631</th>\n",
       "      <td>632</td>\n",
       "      <td>Q00632</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>2020年2月将5家中国媒体驻美机构被列为什么？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>632</th>\n",
       "      <td>633</td>\n",
       "      <td>Q00633</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>5家中国媒体在美国的中国籍雇佣员将有什么变化？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>633</th>\n",
       "      <td>634</td>\n",
       "      <td>Q00634</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>右上角是数字几？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>634</th>\n",
       "      <td>635</td>\n",
       "      <td>Q00635</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>右上角什么汉字？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>635</th>\n",
       "      <td>636</td>\n",
       "      <td>Q00636</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>左下角什么内容？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>636</th>\n",
       "      <td>637</td>\n",
       "      <td>Q00637</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>外交部宣布对什么采取措施？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>637</th>\n",
       "      <td>638</td>\n",
       "      <td>Q00638</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>针对美方对中国记者在哪些方面采取其实性别措施？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>638</th>\n",
       "      <td>639</td>\n",
       "      <td>Q00639</td>\n",
       "      <td>input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...</td>\n",
       "      <td>美国国务院3月2日以什么为由宣布？</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "     index question_id                                           filename  \\\n",
       "628    629      Q00629  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "629    630      Q00630  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "630    631      Q00631  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "631    632      Q00632  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "632    633      Q00633  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "633    634      Q00634  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "634    635      Q00635  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "635    636      Q00636  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "636    637      Q00637  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "637    638      Q00638  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "638    639      Q00639  input/训练集/image/eb12d5c8c1cf5a82a308e3244ed22d...   \n",
       "\n",
       "                question_text  \n",
       "628                    这是什么报？  \n",
       "629       中国外交部在3月18日凌晨宣布了什么？  \n",
       "630          2018年12月美方要求了哪些？  \n",
       "631  2020年2月将5家中国媒体驻美机构被列为什么？  \n",
       "632   5家中国媒体在美国的中国籍雇佣员将有什么变化？  \n",
       "633                  右上角是数字几？  \n",
       "634                  右上角什么汉字？  \n",
       "635                  左下角什么内容？  \n",
       "636             外交部宣布对什么采取措施？  \n",
       "637   针对美方对中国记者在哪些方面采取其实性别措施？  \n",
       "638         美国国务院3月2日以什么为由宣布？  "
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df[df['filename'] == df['filename'].iloc[629]]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-13T23:47:01.191381Z",
     "iopub.status.busy": "2021-05-13T23:47:01.190795Z",
     "iopub.status.idle": "2021-05-13T23:47:01.203487Z",
     "shell.execute_reply": "2021-05-13T23:47:01.202834Z",
     "shell.execute_reply.started": "2021-05-13T23:47:01.191333Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>index</th>\n",
       "      <th>question_id</th>\n",
       "      <th>filename</th>\n",
       "      <th>question_text</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1</td>\n",
       "      <td>Q00001</td>\n",
       "      <td>input/训练集/image/2-603986_20190430_4_119.jpg</td>\n",
       "      <td>证书有效期至什么时候？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>2</td>\n",
       "      <td>Q00002</td>\n",
       "      <td>input/训练集/image/2-603986_20190430_4_119.jpg</td>\n",
       "      <td>此单据经什么部门审查，批准？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>3</td>\n",
       "      <td>Q00003</td>\n",
       "      <td>input/训练集/image/2-603986_20190430_4_119.jpg</td>\n",
       "      <td>图片右上角的阿拉伯数字是什么？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>4</td>\n",
       "      <td>Q00004</td>\n",
       "      <td>input/训练集/image/2-603986_20190430_4_119.jpg</td>\n",
       "      <td>图片正上方的文字是什么/</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>5</td>\n",
       "      <td>Q00005</td>\n",
       "      <td>input/训练集/image/2-603986_20190430_4_119.jpg</td>\n",
       "      <td>图片左下角的文字是什么内容？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8610</th>\n",
       "      <td>8611</td>\n",
       "      <td>Q08611</td>\n",
       "      <td>input/训练集/image/b1867b04e902f91846a416fdaee3de...</td>\n",
       "      <td>该报纸正上方中间的一行文字的内容是什么？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8611</th>\n",
       "      <td>8612</td>\n",
       "      <td>Q08612</td>\n",
       "      <td>input/训练集/image/b1867b04e902f91846a416fdaee3de...</td>\n",
       "      <td>该报纸中的周晨指的是什么？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8612</th>\n",
       "      <td>8613</td>\n",
       "      <td>Q08613</td>\n",
       "      <td>input/训练集/image/b1867b04e902f91846a416fdaee3de...</td>\n",
       "      <td>该报纸中什么强硬拒绝了土耳其要求？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8613</th>\n",
       "      <td>8614</td>\n",
       "      <td>Q08614</td>\n",
       "      <td>input/训练集/image/b1867b04e902f91846a416fdaee3de...</td>\n",
       "      <td>什么问题谈不拢？</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8614</th>\n",
       "      <td>8615</td>\n",
       "      <td>Q08615</td>\n",
       "      <td>input/训练集/image/5bfcb1f7N9a0c4ce6.jpg</td>\n",
       "      <td>该产品水泥墙的安装方法是什么？</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>8615 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "      index question_id                                           filename  \\\n",
       "0         1      Q00001        input/训练集/image/2-603986_20190430_4_119.jpg   \n",
       "1         2      Q00002        input/训练集/image/2-603986_20190430_4_119.jpg   \n",
       "2         3      Q00003        input/训练集/image/2-603986_20190430_4_119.jpg   \n",
       "3         4      Q00004        input/训练集/image/2-603986_20190430_4_119.jpg   \n",
       "4         5      Q00005        input/训练集/image/2-603986_20190430_4_119.jpg   \n",
       "...     ...         ...                                                ...   \n",
       "8610   8611      Q08611  input/训练集/image/b1867b04e902f91846a416fdaee3de...   \n",
       "8611   8612      Q08612  input/训练集/image/b1867b04e902f91846a416fdaee3de...   \n",
       "8612   8613      Q08613  input/训练集/image/b1867b04e902f91846a416fdaee3de...   \n",
       "8613   8614      Q08614  input/训练集/image/b1867b04e902f91846a416fdaee3de...   \n",
       "8614   8615      Q08615              input/训练集/image/5bfcb1f7N9a0c4ce6.jpg   \n",
       "\n",
       "             question_text  \n",
       "0              证书有效期至什么时候？  \n",
       "1           此单据经什么部门审查，批准？  \n",
       "2          图片右上角的阿拉伯数字是什么？  \n",
       "3             图片正上方的文字是什么/  \n",
       "4           图片左下角的文字是什么内容？  \n",
       "...                    ...  \n",
       "8610  该报纸正上方中间的一行文字的内容是什么？  \n",
       "8611         该报纸中的周晨指的是什么？  \n",
       "8612     该报纸中什么强硬拒绝了土耳其要求？  \n",
       "8613              什么问题谈不拢？  \n",
       "8614       该产品水泥墙的安装方法是什么？  \n",
       "\n",
       "[8615 rows x 4 columns]"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Overview of the full test DataFrame (8615 question rows).\n",
     "df"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# 对于所有的数据集，迭代每一行\n",
    "# 步骤1：判断OCR是否识别\n",
    "\n",
    "from tqdm import tqdm\n",
    "\n",
    "test_result = []\n",
    "for row in tqdm(df.iloc[:].iterrows()):\n",
    "    qs = row[1].question_text\n",
    "    # ocr是否识别成功\n",
    "    if not os.path.exists('ocr_result/'+os.path.basename(row[1]['filename'])[:-4] + '.txt'):\n",
    "        continue\n",
    "    \n",
    "    # 读取ocr识别结果\n",
    "    ocrs = codecs.open('ocr_result/'+os.path.basename(row[1]['filename'])[:-4] + '.txt').readlines()[:]\n",
    "    # 文字\n",
    "    ocr_text = [x.split('\\t')[1].strip() for x in ocrs]\n",
    "    # 文本框\n",
    "    ocr_box = [x.split('\\t')[0].strip() for x in ocrs]\n",
    "    \n",
    "    candicate_text = None\n",
    "    if re.findall('什么药品', qs):\n",
    "#         pass\n",
    "        candicate_text = ocr_text[0]\n",
    "        \n",
    "    elif re.findall('说明书来源于哪里', qs):\n",
    "        candicate_text = list(set([x for x in ocr_text if '说明书' in x]))\n",
    "        candicate_text = [x for x in candicate_text if '说明书' in x][0]\n",
    "        candicate_text = candicate_text.replace('说明书', '').replace('来源', '').replace('：', '')\n",
    "    elif re.findall('什么大学什么专业', qs):\n",
    "        candicate_text = list(set([x for x in ocr_text if '大学' in x and '专业' in x]))\n",
    "    elif re.findall('什么大学', qs):\n",
    "        candicate_text = list(set([x for x in ocr_text if re.findall('大学', x)]))\n",
    "        if len(candicate_text) == 0:\n",
    "            continue\n",
    "        \n",
    "    elif re.findall('什么专业', qs):\n",
    "        candicate_text = list(set([x for x in ocr_text if re.findall('本科', x)]))\n",
    "        \n",
    "    elif re.findall('邮箱', qs):\n",
    "        candicate_text = list(set([x for x in ocr_text if re.findall('@', x)]))\n",
    "\n",
    "\n",
    "    if candicate_text is None:\n",
    "        test_result.append({'questionId': row[1].question_id, 'answer': ''})\n",
    "    elif isinstance(candicate_text, list):\n",
    "        if len(candicate_text) > 0:\n",
    "            test_result.append({'questionId': row[1].question_id, 'answer': candicate_text[0]})\n",
    "        else:\n",
    "            test_result.append({'questionId': row[1].question_id, 'answer': ''})\n",
    "    else:\n",
    "        test_result.append({'questionId': row[1].question_id, 'answer': candicate_text})\n",
    "    \n",
    "\n",
    "        # break\n",
    "    # break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "import json\n",
    "with open('answer.json', 'w') as up:\n",
    "    json.dump(test_result, up)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Package the answer file into the submission archive.\n",
     "!zip -r answer.zip answer.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Re-display the test DataFrame after answer generation.\n",
     "df"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Bert"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:31.078142Z",
     "iopub.status.busy": "2021-05-14T02:03:31.077580Z",
     "iopub.status.idle": "2021-05-14T02:03:31.164472Z",
     "shell.execute_reply": "2021-05-14T02:03:31.164010Z",
     "shell.execute_reply.started": "2021-05-14T02:03:31.078096Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Load the training set and rewrite filenames to the local image directory.\n",
     "train_df = pd.read_csv('input/train-utf8.csv')\n",
     "train_df['filename'] = 'input/训练集/image/' + train_df['filename'] # rewrite to local path"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:31.437891Z",
     "iopub.status.busy": "2021-05-14T02:03:31.437352Z",
     "iopub.status.idle": "2021-05-14T02:03:31.445963Z",
     "shell.execute_reply": "2021-05-14T02:03:31.445258Z",
     "shell.execute_reply.started": "2021-05-14T02:03:31.437843Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def read_ocr(row):\n",
    "    if not os.path.exists('ocr_result/'+os.path.basename(str(row[1]['filename']))[:-4] + '.txt'):\n",
    "        return ''\n",
    "    \n",
    "    ocrs = codecs.open('ocr_result/'+os.path.basename(str(row[1]['filename']))[:-4] + '.txt').readlines()[:]\n",
    "    # 文字\n",
    "    ocr_text = [x.split('\\t')[1].strip() for x in ocrs]\n",
    "    return ''.join(ocr_text).replace(' ', '')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:31.987738Z",
     "iopub.status.busy": "2021-05-14T02:03:31.987235Z",
     "iopub.status.idle": "2021-05-14T02:03:38.926295Z",
     "shell.execute_reply": "2021-05-14T02:03:38.925627Z",
     "shell.execute_reply.started": "2021-05-14T02:03:31.987693Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "CPU times: user 6.7 s, sys: 238 ms, total: 6.94 s\n",
      "Wall time: 6.93 s\n"
     ]
    }
   ],
   "source": [
     "%%time\n",
     "# Extract OCR text for every training image (~7 s for the full set in the recorded run).\n",
     "train_text = [read_ocr(x) for x in train_df.iloc[:].iterrows()]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:43.307969Z",
     "iopub.status.busy": "2021-05-14T02:03:43.307449Z",
     "iopub.status.idle": "2021-05-14T02:03:45.927108Z",
     "shell.execute_reply": "2021-05-14T02:03:45.926304Z",
     "shell.execute_reply.started": "2021-05-14T02:03:43.307923Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Attach the extracted OCR text as a new 'text' column.\n",
     "train_df.loc[:, 'text'] = train_text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:45.928059Z",
     "iopub.status.busy": "2021-05-14T02:03:45.927881Z",
     "iopub.status.idle": "2021-05-14T02:03:46.188357Z",
     "shell.execute_reply": "2021-05-14T02:03:46.187672Z",
     "shell.execute_reply.started": "2021-05-14T02:03:45.928044Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(41117, 6)\n",
      "(41113, 6)\n"
     ]
    }
   ],
   "source": [
    "print(train_df.shape)\n",
    "train_df = train_df[~train_df.apply(lambda x: len(x[-1]) == 0, axis=1)]\n",
    "print(train_df.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:46.992897Z",
     "iopub.status.busy": "2021-05-14T02:03:46.992403Z",
     "iopub.status.idle": "2021-05-14T02:03:46.996995Z",
     "shell.execute_reply": "2021-05-14T02:03:46.996089Z",
     "shell.execute_reply.started": "2021-05-14T02:03:46.992871Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def str_index(doc, query):\n",
    "    try:\n",
    "        return doc.index(query), len(query)\n",
    "    except:\n",
    "        return 0, 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:47.188145Z",
     "iopub.status.busy": "2021-05-14T02:03:47.187642Z",
     "iopub.status.idle": "2021-05-14T02:03:47.543012Z",
     "shell.execute_reply": "2021-05-14T02:03:47.542354Z",
     "shell.execute_reply.started": "2021-05-14T02:03:47.188101Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Answer span for each row: (start_char, answer_len) within the OCR text.\n",
     "# NOTE(review): positional x[-1]/x[-2] assume 'text' and the answer column\n",
     "# are the last two columns — confirm against the CSV schema.\n",
     "train_df['index'] = train_df.apply(lambda x: str_index(x[-1], x[-2]), axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:47.543963Z",
     "iopub.status.busy": "2021-05-14T02:03:47.543794Z",
     "iopub.status.idle": "2021-05-14T02:03:47.565520Z",
     "shell.execute_reply": "2021-05-14T02:03:47.565115Z",
     "shell.execute_reply.started": "2021-05-14T02:03:47.543948Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Unpack the (start, length) tuple into character-level offsets.\n",
     "train_df['index_start'] = train_df['index'].apply(lambda x: x[0])\n",
     "train_df['index_end'] = train_df['index'].apply(lambda x: x[1])\n",
     "\n",
     "# turn the length into an exclusive end offset\n",
     "train_df['index_end'] = train_df['index_start'] + train_df['index_end']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:03:49.007747Z",
     "iopub.status.busy": "2021-05-14T02:03:49.007217Z",
     "iopub.status.idle": "2021-05-14T02:03:49.039970Z",
     "shell.execute_reply": "2021-05-14T02:03:49.039411Z",
     "shell.execute_reply.started": "2021-05-14T02:03:49.007700Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Keep rows where the answer was actually found (end != 0) and ends before\n",
     "# char 480 — presumably a margin inside the 512-token max_length for the\n",
     "# question and special tokens; TODO confirm the margin is sufficient.\n",
     "train_df = train_df[train_df['index_end'] !=0]\n",
     "train_df = train_df[train_df['index_end'] < 480]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:04:29.818035Z",
     "iopub.status.busy": "2021-05-14T02:04:29.817549Z",
     "iopub.status.idle": "2021-05-14T02:05:45.467753Z",
     "shell.execute_reply": "2021-05-14T02:05:45.467099Z",
     "shell.execute_reply.started": "2021-05-14T02:04:29.818000Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "95564b03a197464d8da6c23b92dda9c1",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=109540.0, style=ProgressStyle(descripti…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "16ce46fb8ab6405a89701d7ba8821c17",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=268961.0, style=ProgressStyle(descripti…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "c0372c0500f946d38d172ea7bc35a4b9",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=2.0, style=ProgressStyle(description_wi…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "d71ce88ff4a549d69decf182bb0b627e",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=112.0, style=ProgressStyle(description_…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "cc59c263e2fc4e9e91b1110d111ad466",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=19.0, style=ProgressStyle(description_w…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "909ce6d9b65f40e4a84ffab70700e952",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=647.0, style=ProgressStyle(description_…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "55578e4fa33f4c6f9779fb21ec34cbdd",
       "version_major": 2,
       "version_minor": 0
      },
      "text/plain": [
       "HBox(children=(FloatProgress(value=0.0, description='Downloading', max=411578458.0, style=ProgressStyle(descri…"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Some weights of the model checkpoint at hfl/chinese-bert-wwm-ext were not used when initializing BertForQuestionAnswering: ['cls.predictions.bias', 'cls.predictions.transform.dense.weight', 'cls.predictions.transform.dense.bias', 'cls.predictions.transform.LayerNorm.weight', 'cls.predictions.transform.LayerNorm.bias', 'cls.predictions.decoder.weight', 'cls.seq_relationship.weight', 'cls.seq_relationship.bias']\n",
      "- This IS expected if you are initializing BertForQuestionAnswering from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).\n",
      "- This IS NOT expected if you are initializing BertForQuestionAnswering from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).\n",
      "Some weights of BertForQuestionAnswering were not initialized from the model checkpoint at hfl/chinese-bert-wwm-ext and are newly initialized: ['qa_outputs.weight', 'qa_outputs.bias']\n",
      "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n"
     ]
    }
   ],
   "source": [
     "from transformers import BertTokenizerFast, BertForQuestionAnswering\n",
     "\n",
     "# Chinese whole-word-masking BERT; the QA head is newly initialized (see the\n",
     "# checkpoint warning in the cell output) and must be fine-tuned before use.\n",
     "tokenizer = BertTokenizerFast.from_pretrained('hfl/chinese-bert-wwm-ext')\n",
     "model = BertForQuestionAnswering.from_pretrained('hfl/chinese-bert-wwm-ext')\n",
     "\n",
     "# Encode (context, question) pairs as PyTorch tensors, truncated/padded to 512.\n",
     "train_encodings = tokenizer(list(train_df['text'])[:], list(train_df['question_text'])[:], \n",
     "                            return_tensors='pt', truncation=True, padding=True,\n",
     "                           max_length=512)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:07:28.287885Z",
     "iopub.status.busy": "2021-05-14T02:07:28.287313Z",
     "iopub.status.idle": "2021-05-14T02:07:28.351608Z",
     "shell.execute_reply": "2021-05-14T02:07:28.351074Z",
     "shell.execute_reply.started": "2021-05-14T02:07:28.287837Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Convert character offsets into token indices for QA supervision.\n",
     "# index_end is exclusive, so x-1 is the last character of the answer span.\n",
     "train_encodings['start_positions'] = [train_encodings.char_to_token(idx, x) for idx, x in enumerate(train_df['index_start'].values[:])]\n",
     "train_encodings['end_positions'] = [train_encodings.char_to_token(idx, x-1) for idx, x in enumerate(train_df['index_end'].values[:])]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:07:29.077726Z",
     "iopub.status.busy": "2021-05-14T02:07:29.077199Z",
     "iopub.status.idle": "2021-05-14T02:07:29.087088Z",
     "shell.execute_reply": "2021-05-14T02:07:29.086455Z",
     "shell.execute_reply.started": "2021-05-14T02:07:29.077679Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[]"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "[x for x in train_encodings['end_positions'] if x == None]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:07:31.098087Z",
     "iopub.status.busy": "2021-05-14T02:07:31.097550Z",
     "iopub.status.idle": "2021-05-14T02:07:31.106965Z",
     "shell.execute_reply": "2021-05-14T02:07:31.106377Z",
     "shell.execute_reply.started": "2021-05-14T02:07:31.098041Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "import torch\n",
    "from torch.utils.data import Dataset, DataLoader, TensorDataset\n",
    "    \n",
    "import torch\n",
    "\n",
    "class SquadDataset(torch.utils.data.Dataset):\n",
    "    def __init__(self, encodings):\n",
    "        self.encodings = encodings\n",
    "\n",
    "    def __getitem__(self, idx):\n",
    "        return {key: torch.tensor(val[idx]) for key, val in self.encodings.items()}\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.encodings.input_ids)\n",
    "\n",
    "train_dataset = SquadDataset(train_encodings)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T02:07:40.192498Z",
     "iopub.status.busy": "2021-05-14T02:07:40.191929Z",
     "iopub.status.idle": "2021-05-14T03:07:45.345382Z",
     "shell.execute_reply": "2021-05-14T03:07:45.344640Z",
     "shell.execute_reply.started": "2021-05-14T02:07:40.192449Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:11: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  # This is added back by InteractiveShellApp.init_path()\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "6.392827987670898 0.0 0.0\n",
      "4.641648769378662 0.0 0.0\n",
      "3.6394097805023193 0.125 0.125\n",
      "3.7527449131011963 0.125 0.25\n",
      "4.01697301864624 0.0 0.0\n",
      "5.268149375915527 0.125 0.125\n",
      "3.655611753463745 0.25 0.25\n",
      "3.401010036468506 0.125 0.125\n",
      "2.8646187782287598 0.0 0.0\n",
      "2.3008298873901367 0.375 0.625\n",
      "3.1255717277526855 0.5 0.25\n",
      "4.104433536529541 0.25 0.125\n",
      "1.9003040790557861 0.375 0.75\n",
      "3.4407365322113037 0.125 0.25\n",
      "2.717897415161133 0.625 0.375\n",
      "2.4693455696105957 0.625 0.5\n",
      "2.2299017906188965 0.5 0.375\n",
      "2.390782356262207 0.5 0.5\n",
      "1.9221857786178589 0.625 0.375\n",
      "2.609260320663452 0.375 0.375\n",
      "1.9003806114196777 0.625 0.375\n",
      "1.879948616027832 0.375 0.375\n",
      "2.43220853805542 0.5 0.375\n",
      "2.430473804473877 0.375 0.375\n",
      "2.999849796295166 0.25 0.0\n",
      "2.43666934967041 0.625 0.375\n",
      "2.276320457458496 0.5 0.5\n",
      "3.106001853942871 0.5 0.375\n",
      "1.8396708965301514 0.375 0.25\n",
      "2.252828359603882 0.5 0.5\n",
      "2.288601875305176 0.375 0.375\n",
      "2.990363597869873 0.25 0.375\n",
      "1.7852578163146973 0.5 0.5\n",
      "2.253142833709717 0.5 0.5\n",
      "1.2141497135162354 0.5 0.25\n",
      "3.279722213745117 0.5 0.125\n",
      "1.6049631834030151 0.625 0.75\n",
      "1.48598313331604 0.5 0.375\n",
      "1.3949215412139893 0.5 0.625\n",
      "1.759171485900879 0.5 0.375\n",
      "2.1499581336975098 0.375 0.5\n",
      "1.1085712909698486 0.875 0.75\n",
      "0.8576023578643799 0.625 0.75\n",
      "1.8861654996871948 0.625 0.625\n",
      "2.9053902626037598 0.375 0.375\n",
      "2.0016698837280273 0.5 0.625\n",
      "1.5113856792449951 0.5 0.5\n",
      "1.0637376308441162 0.75 0.875\n",
      "2.5001726150512695 0.5 0.5\n",
      "1.9042553901672363 0.5 0.375\n",
      "1.9449434280395508 0.375 0.5\n",
      "1.5534191131591797 0.625 0.375\n",
      "1.471686840057373 0.75 0.5\n",
      "2.7026185989379883 0.375 0.125\n",
      "1.85944664478302 0.625 0.25\n",
      "2.4838013648986816 0.5 0.5\n",
      "2.2240586280822754 0.375 0.5\n",
      "2.0925774574279785 0.5 0.375\n",
      "1.870011329650879 0.625 0.625\n",
      "2.3207719326019287 0.375 0.75\n",
      "1.2804183959960938 0.625 0.625\n",
      "2.4442715644836426 0.375 0.25\n",
      "1.7749018669128418 0.5 0.5\n",
      "2.955740213394165 0.5 0.375\n",
      "1.3804689645767212 0.625 0.875\n",
      "1.8582680225372314 0.625 0.375\n",
      "1.7834455966949463 0.625 0.5\n",
      "1.0419468879699707 0.75 0.625\n",
      "1.2380716800689697 0.75 0.625\n",
      "1.007293462753296 0.75 0.625\n",
      "1.992365837097168 0.75 0.5\n",
      "1.8252346515655518 0.5 0.75\n",
      "2.3325469493865967 0.5 0.5\n",
      "2.507690906524658 0.25 0.375\n",
      "1.4026901721954346 0.5 0.625\n",
      "2.0410821437835693 0.75 0.375\n",
      "1.7702564001083374 0.5 0.625\n",
      "1.9070724248886108 0.75 0.5\n",
      "1.9921326637268066 0.25 0.75\n",
      "1.5368733406066895 0.625 0.5\n",
      "0.8113269805908203 0.875 0.875\n",
      "2.1702494621276855 0.5 0.25\n",
      "1.6483665704727173 0.75 0.375\n",
      "1.0374252796173096 0.375 0.5\n",
      "1.3712246417999268 0.625 0.625\n",
      "2.484956741333008 0.25 0.25\n",
      "2.342773914337158 0.5 0.375\n",
      "1.1638951301574707 0.625 0.875\n",
      "1.8350059986114502 0.75 0.5\n",
      "2.2399330139160156 0.5 0.5\n",
      "2.9980196952819824 0.375 0.375\n",
      "1.696144938468933 0.75 0.5\n",
      "1.997018814086914 0.375 0.375\n",
      "2.8092222213745117 0.375 0.5\n",
      "2.338848829269409 0.625 0.625\n",
      "2.0781166553497314 0.375 0.375\n",
      "1.8240917921066284 0.5 0.5\n",
      "1.2407413721084595 0.625 0.625\n",
      "1.6876821517944336 0.375 0.375\n",
      "2.111816883087158 0.375 0.375\n",
      "0.7328478097915649 0.875 0.875\n",
      "1.098351240158081 0.625 0.875\n",
      "1.1736536026000977 0.625 0.625\n",
      "1.4639273881912231 0.5 0.625\n",
      "1.1476690769195557 0.875 0.75\n",
      "1.5614596605300903 0.5 0.5\n",
      "1.4202532768249512 0.5 0.625\n",
      "1.0316578149795532 0.75 0.625\n",
      "3.39052677154541 0.375 0.5\n",
      "1.8210546970367432 0.5 0.5\n",
      "1.4988974332809448 0.5 0.5\n",
      "1.5469688177108765 0.625 0.5\n",
      "1.5910887718200684 0.625 0.375\n",
      "1.799992322921753 0.5 0.5\n",
      "2.1880791187286377 0.625 0.375\n",
      "1.6388477087020874 0.75 0.5\n",
      "1.6297459602355957 0.625 0.5\n",
      "3.1526193618774414 0.375 0.25\n",
      "3.0343103408813477 0.375 0.375\n",
      "1.1938809156417847 0.75 0.75\n",
      "3.2607128620147705 0.125 0.125\n",
      "1.411433458328247 0.75 0.625\n",
      "0.6635432243347168 0.875 0.75\n",
      "1.5250983238220215 0.75 0.75\n",
      "0.6322778463363647 0.875 0.875\n",
      "2.612455129623413 0.5 0.375\n",
      "1.286920189857483 0.5 0.625\n",
      "1.4533624649047852 0.625 0.625\n",
      "1.58584725856781 0.75 0.5\n",
      "3.010974407196045 0.25 0.375\n",
      "2.2098429203033447 0.375 0.375\n",
      "1.488197922706604 0.375 0.5\n",
      "2.575443744659424 0.375 0.375\n",
      "1.7455499172210693 0.625 0.5\n",
      "1.7601017951965332 0.5 0.375\n",
      "2.7348427772521973 0.625 0.5\n",
      "1.7407982349395752 0.625 0.625\n",
      "0.9054509401321411 0.75 0.5\n",
      "1.8805792331695557 0.625 0.625\n",
      "1.2243921756744385 0.625 0.5\n",
      "1.70823073387146 0.75 0.375\n",
      "2.6385128498077393 0.5 0.625\n",
      "2.0976829528808594 0.375 0.375\n",
      "2.317525863647461 0.125 0.5\n",
      "1.6497645378112793 0.625 0.5\n",
      "3.7648026943206787 0.25 0.375\n",
      "1.4439258575439453 0.625 0.375\n",
      "1.2010451555252075 0.625 0.625\n",
      "0.941055178642273 0.75 0.625\n",
      "2.1451945304870605 0.375 0.625\n",
      "0.9205752611160278 0.75 0.75\n",
      "0.8723894953727722 0.75 0.75\n",
      "1.1365149021148682 0.75 0.625\n",
      "1.9575331211090088 0.5 0.625\n",
      "1.471191167831421 0.625 0.5\n",
      "1.3721545934677124 0.5 0.625\n",
      "2.554208993911743 0.625 0.5\n",
      "0.8967193365097046 0.875 0.75\n",
      "1.8417892456054688 0.625 0.625\n",
      "1.4016155004501343 0.625 0.5\n",
      "2.5524609088897705 0.375 0.375\n",
      "0.6941664218902588 0.875 0.875\n",
      "1.4098701477050781 0.5 0.5\n",
      "1.892138123512268 0.5 0.25\n",
      "2.0606770515441895 0.625 0.375\n",
      "1.9712498188018799 0.5 0.625\n",
      "2.239715814590454 0.375 0.5\n",
      "1.7761237621307373 0.625 0.5\n",
      "1.0301374197006226 0.625 0.75\n",
      "0.7203235626220703 0.875 0.625\n",
      "1.0803779363632202 0.75 0.75\n",
      "2.0722286701202393 0.375 0.5\n",
      "1.4680957794189453 0.5 0.375\n",
      "2.100508213043213 0.375 0.375\n",
      "2.0235190391540527 0.5 0.375\n",
      "2.3435378074645996 0.5 0.5\n",
      "2.1033437252044678 0.25 0.5\n",
      "3.489873170852661 0.625 0.0\n",
      "1.0575058460235596 0.75 0.75\n",
      "1.8866567611694336 0.75 0.75\n",
      "1.191300630569458 0.625 0.75\n",
      "1.5203263759613037 0.5 0.625\n",
      "1.1950805187225342 0.75 0.75\n",
      "0.9771963357925415 0.5 0.75\n",
      "1.1479721069335938 0.625 0.75\n",
      "1.1985576152801514 0.625 0.5\n",
      "1.4524428844451904 0.375 0.375\n",
      "1.6394546031951904 0.75 0.75\n",
      "0.48299872875213623 0.875 0.75\n",
      "1.2127916812896729 0.75 0.5\n",
      "1.30476713180542 0.625 0.75\n",
      "0.503623902797699 0.875 0.75\n",
      "2.336845636367798 0.625 0.5\n",
      "1.560389518737793 0.5 0.625\n",
      "1.104093074798584 0.75 0.625\n",
      "1.2705647945404053 0.625 0.625\n",
      "2.299619197845459 0.5 0.375\n",
      "2.012162685394287 0.5 0.5\n",
      "1.7003860473632812 0.625 0.5\n",
      "1.6789515018463135 0.375 0.375\n",
      "2.9406204223632812 0.375 0.375\n",
      "1.4133813381195068 0.625 0.625\n",
      "2.678645133972168 0.5 0.375\n",
      "1.4938311576843262 0.625 0.625\n",
      "1.2699191570281982 0.75 0.625\n",
      "2.051427125930786 0.5 0.5\n",
      "2.661430835723877 0.375 0.5\n",
      "1.1260870695114136 0.75 0.75\n",
      "1.5461293458938599 0.875 0.375\n",
      "1.0553267002105713 0.75 0.625\n",
      "1.1918318271636963 0.75 0.625\n",
      "1.1115894317626953 0.625 0.75\n",
      "2.1046299934387207 0.5 0.5\n",
      "1.7732055187225342 0.625 0.625\n",
      "1.7243889570236206 0.0 0.125\n",
      "1.1533442735671997 0.625 0.75\n",
      "1.3898261785507202 0.625 0.5\n",
      "2.415253162384033 0.5 0.25\n",
      "0.9998021125793457 0.75 0.75\n",
      "0.9364667534828186 0.75 0.75\n",
      "1.7209951877593994 0.5 0.5\n",
      "1.2043530941009521 0.375 0.625\n",
      "3.6377906799316406 0.25 0.375\n",
      "2.3904852867126465 0.625 0.5\n",
      "1.085421085357666 0.75 0.5\n",
      "0.9754079580307007 0.75 0.625\n",
      "1.1188887357711792 0.5 0.75\n",
      "1.9993351697921753 0.375 0.5\n",
      "1.8211569786071777 0.625 0.75\n",
      "1.325345516204834 0.75 0.625\n",
      "1.9158117771148682 0.375 0.625\n",
      "1.8373444080352783 0.375 0.25\n",
      "1.8607075214385986 0.75 0.5\n",
      "0.801600456237793 0.875 0.625\n",
      "2.0919747352600098 0.625 0.375\n",
      "1.4401406049728394 0.5 0.625\n",
      "1.5409603118896484 0.75 0.625\n",
      "1.8633968830108643 0.5 0.625\n",
      "2.901315450668335 0.25 0.375\n",
      "0.3644994795322418 0.875 1.0\n",
      "1.68936026096344 0.625 0.625\n",
      "0.6968519687652588 0.875 0.75\n",
      "1.5780680179595947 0.625 0.625\n",
      "1.3766028881072998 0.875 0.375\n",
      "1.1773581504821777 0.875 0.625\n",
      "1.2410082817077637 0.625 0.75\n",
      "1.5286734104156494 0.375 0.75\n",
      "1.0345550775527954 0.625 0.625\n",
      "1.1131033897399902 0.5 0.875\n",
      "2.0386977195739746 0.5 0.625\n",
      "1.7272834777832031 0.25 0.375\n",
      "1.8380182981491089 0.5 0.5\n",
      "2.3660264015197754 0.75 0.375\n",
      "1.6246764659881592 0.5 0.375\n",
      "1.6574740409851074 0.75 0.5\n",
      "1.5007243156433105 0.75 0.5\n",
      "1.1299245357513428 0.875 0.375\n",
      "1.1334187984466553 0.75 0.875\n",
      "2.2764060497283936 0.375 0.5\n",
      "1.0171115398406982 0.625 0.75\n",
      "1.5281190872192383 0.625 0.625\n",
      "1.7532873153686523 0.375 0.125\n",
      "1.8092143535614014 0.5 0.5\n",
      "1.2463099956512451 0.625 0.5\n",
      "1.6362223625183105 0.625 0.625\n",
      "1.6612436771392822 0.625 0.5\n",
      "1.6804876327514648 0.625 0.75\n",
      "1.0890341997146606 0.625 0.625\n",
      "1.8361780643463135 0.5 0.25\n",
      "0.9126072525978088 0.75 0.5\n",
      "2.243032455444336 0.375 0.375\n",
      "1.867177963256836 0.75 0.625\n",
      "2.2248077392578125 0.5 0.375\n",
      "2.677899122238159 0.5 0.5\n",
      "1.7519311904907227 0.625 0.5\n",
      "0.958503246307373 0.375 0.5\n",
      "0.9745886325836182 0.75 0.75\n",
      "1.1124463081359863 0.625 0.75\n",
      "0.933824896812439 0.5 0.75\n",
      "3.3229384422302246 0.5 0.375\n",
      "2.5575625896453857 0.375 0.375\n",
      "1.291429042816162 0.625 0.625\n",
      "1.9092035293579102 0.5 0.5\n",
      "1.6825848817825317 0.625 0.5\n",
      "1.9024311304092407 0.25 0.625\n",
      "1.3260433673858643 0.625 0.875\n",
      "0.9607189893722534 0.625 0.625\n",
      "0.3837975859642029 1.0 1.0\n",
      "1.4506175518035889 0.5 0.375\n",
      "0.5823667049407959 0.875 0.875\n",
      "0.8546518087387085 0.875 0.875\n",
      "0.7650470733642578 0.75 0.75\n",
      "0.7356025576591492 1.0 0.75\n",
      "1.377051830291748 0.75 0.625\n",
      "1.3062459230422974 0.75 0.625\n",
      "0.537497878074646 0.875 0.75\n",
      "0.3629909157752991 1.0 0.75\n",
      "0.84622722864151 0.75 0.75\n",
      "1.120124340057373 0.875 0.75\n",
      "1.149261474609375 0.75 0.75\n",
      "1.7034924030303955 0.5 0.5\n",
      "1.5190259218215942 0.375 0.5\n",
      "1.05418062210083 0.75 0.625\n",
      "1.7735686302185059 0.625 0.5\n",
      "1.7568790912628174 0.625 0.375\n",
      "1.9911394119262695 0.625 0.5\n",
      "1.6547281742095947 0.5 0.5\n",
      "1.2467544078826904 0.75 0.625\n",
      "0.6631900072097778 0.875 0.75\n",
      "0.7134376764297485 0.75 0.75\n",
      "0.3993120789527893 0.875 0.875\n",
      "0.9932422041893005 0.875 0.875\n",
      "1.1158697605133057 0.625 0.625\n",
      "2.0196197032928467 0.5 0.375\n",
      "0.2695883810520172 1.0 0.875\n",
      "0.8099259734153748 0.75 0.75\n",
      "0.6994257569313049 0.75 0.75\n",
      "1.3709635734558105 0.75 0.5\n",
      "0.5825209617614746 0.75 0.875\n",
      "1.5377635955810547 0.75 0.5\n",
      "0.4703723192214966 0.875 0.875\n",
      "1.0299625396728516 0.75 0.625\n",
      "1.3541383743286133 0.75 0.625\n",
      "1.3274405002593994 0.75 0.625\n",
      "1.0924630165100098 0.875 0.75\n",
      "0.9601389169692993 0.75 0.75\n",
      "0.2340051829814911 1.0 1.0\n",
      "1.7290985584259033 0.5 0.5\n",
      "0.7568230032920837 0.75 0.875\n",
      "1.0688555240631104 0.5 0.625\n",
      "0.3915196657180786 0.875 1.0\n",
      "1.6327829360961914 0.25 0.625\n",
      "0.2544246017932892 0.75 0.875\n",
      "1.274573564529419 0.5 0.625\n",
      "1.1151213645935059 0.5 0.875\n",
      "1.8911888599395752 0.5 0.5\n",
      "1.384740948677063 0.75 0.5\n",
      "0.6751714944839478 0.75 0.875\n",
      "0.8364942073822021 0.75 0.5\n",
      "1.0097684860229492 0.75 0.875\n",
      "0.7229465246200562 0.875 0.75\n",
      "1.1484229564666748 0.75 0.75\n",
      "0.9773210883140564 0.875 0.625\n",
      "0.547742486000061 0.75 0.875\n",
      "0.757793664932251 0.875 0.75\n",
      "0.44072210788726807 0.875 1.0\n",
      "1.0885509252548218 0.625 0.875\n",
      "0.6290181875228882 0.875 0.75\n",
      "0.9592490196228027 0.875 0.625\n",
      "0.3074948191642761 0.875 1.0\n",
      "1.9390242099761963 0.625 0.625\n",
      "0.5488376617431641 0.875 0.75\n",
      "0.6977177262306213 0.875 0.875\n",
      "1.059058666229248 0.75 0.75\n",
      "1.202466607093811 0.75 0.5\n",
      "1.4313032627105713 0.375 0.375\n",
      "1.2431306838989258 0.625 0.75\n",
      "1.0414800643920898 0.75 0.5\n",
      "0.49419546127319336 0.875 0.75\n",
      "1.9928793907165527 0.5 0.375\n",
      "0.9825987815856934 0.5 0.875\n",
      "2.0625481605529785 0.625 0.625\n",
      "1.1504933834075928 0.75 0.5\n",
      "0.7600985765457153 0.75 0.625\n",
      "0.5267854928970337 0.875 0.75\n",
      "0.7781211137771606 0.875 0.75\n",
      "1.4934508800506592 0.875 0.75\n",
      "0.4277385473251343 0.875 1.0\n",
      "0.7048579454421997 0.875 0.875\n",
      "0.7475042343139648 0.875 0.625\n",
      "2.011265277862549 0.5 0.5\n",
      "1.1897075176239014 0.5 1.0\n",
      "0.7605998516082764 1.0 0.875\n",
      "0.5288785696029663 0.875 1.0\n",
      "0.8168150186538696 0.875 0.625\n",
      "1.545498013496399 0.75 0.375\n",
      "0.7434064149856567 0.875 0.75\n",
      "1.9920966625213623 0.375 0.375\n",
      "1.4779924154281616 0.5 0.75\n",
      "1.6396087408065796 0.375 0.5\n",
      "2.0831241607666016 0.5 0.5\n",
      "1.4040426015853882 0.625 0.5\n",
      "0.5271744728088379 0.75 0.75\n",
      "0.41250765323638916 1.0 0.625\n",
      "0.7743496894836426 0.625 0.625\n",
      "1.2428417205810547 0.625 0.5\n",
      "0.707139253616333 0.75 0.875\n",
      "0.9464740753173828 0.75 0.75\n",
      "1.8704495429992676 0.5 0.5\n",
      "1.856034278869629 0.625 0.625\n",
      "0.9530921578407288 0.75 0.625\n",
      "0.7374357581138611 0.75 0.75\n",
      "1.266526222229004 0.875 0.75\n",
      "0.9999358654022217 0.75 0.75\n",
      "0.9536048769950867 0.75 0.75\n",
      "0.1819421947002411 1.0 1.0\n",
      "0.19147156178951263 1.0 1.0\n",
      "1.900402307510376 0.5 0.5\n",
      "1.273427963256836 0.75 0.625\n",
      "0.8920124769210815 0.75 0.875\n",
      "0.7784255743026733 0.75 0.625\n",
      "0.8217578530311584 0.75 0.625\n",
      "0.9302586317062378 0.75 0.875\n",
      "0.6715144515037537 0.875 0.75\n",
      "1.609370470046997 0.5 0.375\n",
      "0.6553863286972046 0.75 1.0\n",
      "1.9541542530059814 0.375 0.625\n",
      "0.3062945306301117 0.875 1.0\n",
      "0.76187664270401 0.75 0.625\n",
      "2.0185649394989014 0.75 0.75\n",
      "1.399535894393921 0.625 0.375\n",
      "1.118083119392395 0.625 0.625\n",
      "1.265763521194458 0.625 0.5\n",
      "0.6638981103897095 0.875 0.625\n",
      "2.0106513500213623 0.375 0.125\n",
      "1.2899645566940308 0.625 0.75\n",
      "0.30265334248542786 0.875 0.875\n",
      "0.6680649518966675 0.875 0.875\n",
      "1.6757510900497437 0.5 0.625\n",
      "1.3597604036331177 0.625 0.75\n",
      "0.9064550399780273 0.5 0.5\n",
      "1.0133394002914429 0.625 0.75\n",
      "1.0398151874542236 0.875 0.625\n",
      "0.8710927963256836 0.75 0.625\n",
      "1.2453100681304932 0.375 0.625\n",
      "1.3201355934143066 0.625 0.75\n",
      "0.9196398258209229 0.75 0.75\n",
      "0.7054473161697388 0.875 0.625\n",
      "1.6705296039581299 0.375 0.625\n",
      "1.6952040195465088 0.625 0.375\n",
      "1.1025172472000122 0.75 0.5\n",
      "0.47164881229400635 0.875 0.875\n",
      "0.7513190507888794 0.75 0.75\n",
      "1.0074312686920166 0.875 0.75\n",
      "1.8251819610595703 0.625 0.75\n",
      "0.7830346822738647 0.75 0.5\n",
      "2.168914556503296 0.5 0.5\n",
      "0.14742118120193481 1.0 1.0\n",
      "1.2123304605484009 0.375 0.75\n",
      "1.9869945049285889 0.5 0.375\n",
      "0.7167966365814209 0.875 0.875\n",
      "1.1312859058380127 0.75 0.625\n",
      "1.5671465396881104 0.625 0.75\n",
      "1.0369116067886353 0.75 0.875\n",
      "1.5524659156799316 0.875 0.5\n",
      "0.7443912029266357 0.875 0.875\n",
      "0.41599175333976746 0.875 1.0\n",
      "1.953386902809143 0.5 0.375\n",
      "1.6015955209732056 0.625 0.625\n",
      "0.5739901065826416 0.875 0.875\n",
      "0.46728402376174927 0.875 0.625\n",
      "0.45512324571609497 0.875 0.625\n",
      "1.023284912109375 0.75 0.75\n",
      "0.817471981048584 0.75 1.0\n",
      "1.0992408990859985 0.625 0.5\n",
      "0.5950505137443542 0.875 1.0\n",
      "1.824053406715393 0.75 0.625\n",
      "0.5708390474319458 0.875 0.75\n",
      "1.3570654392242432 0.75 0.375\n",
      "0.8648706674575806 0.875 0.75\n",
      "1.5394738912582397 0.75 0.75\n",
      "0.9552072286605835 0.75 0.875\n",
      "0.4865679144859314 0.75 0.75\n",
      "0.9845195412635803 0.75 0.75\n",
      "1.3649413585662842 0.625 0.5\n",
      "2.0208637714385986 0.5 0.5\n",
      "1.0350490808486938 0.75 0.75\n",
      "1.0160233974456787 0.75 0.5\n",
      "1.403714656829834 0.625 0.625\n",
      "2.505725860595703 0.375 0.75\n",
      "0.9035869836807251 0.75 0.875\n",
      "1.8714492321014404 0.625 0.375\n",
      "3.199526786804199 0.5 0.5\n",
      "0.7474255561828613 0.75 0.75\n",
      "0.8917925357818604 0.625 0.75\n",
      "2.0632529258728027 0.5 0.5\n",
      "1.1447358131408691 0.75 0.75\n",
      "1.6934254169464111 0.75 0.5\n",
      "1.646480917930603 0.625 0.625\n",
      "1.3613908290863037 0.625 0.5\n",
      "1.328467845916748 0.625 0.75\n",
      "1.4770071506500244 0.75 0.375\n",
      "1.6597533226013184 0.5 0.5\n",
      "2.058847665786743 0.5 0.5\n",
      "1.8466371297836304 0.625 0.5\n",
      "1.1083407402038574 0.75 0.5\n",
      "0.7457910776138306 0.75 0.875\n",
      "0.5829448699951172 0.875 0.875\n",
      "1.011025309562683 0.75 0.75\n",
      "1.3093197345733643 0.625 0.625\n",
      "0.4191955327987671 1.0 0.875\n",
      "0.48649686574935913 0.875 0.75\n",
      "2.079934597015381 0.5 0.5\n",
      "0.2927926778793335 1.0 1.0\n",
      "0.8322178721427917 0.75 0.75\n",
      "1.374982476234436 0.75 0.625\n",
      "1.0594987869262695 0.75 0.75\n",
      "1.0159432888031006 0.625 0.625\n",
      "1.4645733833312988 0.625 0.625\n",
      "0.43084806203842163 1.0 0.75\n",
      "1.1511034965515137 0.875 0.625\n",
      "1.730562448501587 0.375 0.375\n",
      "1.4399900436401367 0.75 0.5\n",
      "1.504181146621704 0.75 0.75\n",
      "0.7137324213981628 0.75 0.75\n",
      "0.6353610754013062 0.75 0.875\n",
      "0.6978528499603271 0.75 0.625\n",
      "1.5471330881118774 0.5 0.875\n",
      "0.957207202911377 0.75 0.75\n",
      "1.534542441368103 0.5 0.5\n",
      "1.4178649187088013 0.375 0.375\n",
      "0.5224680304527283 0.875 0.75\n",
      "1.5822975635528564 0.5 0.5\n",
      "1.0472593307495117 0.625 0.625\n",
      "0.843361496925354 0.75 0.75\n",
      "0.26087644696235657 1.0 0.75\n",
      "1.5188231468200684 0.75 0.75\n",
      "0.6004340052604675 0.875 0.75\n",
      "1.5394821166992188 0.375 0.625\n",
      "1.2410942316055298 0.625 0.75\n",
      "1.1024423837661743 0.75 0.625\n",
      "0.7088086605072021 0.625 0.875\n",
      "2.7864627838134766 0.375 0.25\n",
      "0.5764120221138 0.75 0.75\n",
      "1.8187885284423828 0.375 0.5\n",
      "0.38445228338241577 1.0 1.0\n",
      "1.4326908588409424 0.5 0.25\n",
      "0.7175294756889343 0.75 0.875\n",
      "1.1929254531860352 0.625 0.625\n",
      "1.3241593837738037 0.625 0.75\n",
      "0.8200594186782837 0.875 0.75\n",
      "1.875025749206543 0.75 0.5\n",
      "2.1220645904541016 0.375 0.625\n",
      "1.5538910627365112 0.5 0.5\n",
      "2.433393955230713 0.5 0.5\n",
      "1.8416743278503418 0.375 0.375\n",
      "0.8475546836853027 0.75 0.75\n",
      "0.7333284616470337 0.75 0.625\n",
      "1.0468659400939941 0.625 0.75\n",
      "0.6735188961029053 0.875 0.875\n",
      "1.072144627571106 0.625 0.375\n",
      "1.1128342151641846 0.75 0.75\n",
      "1.0036730766296387 0.75 0.625\n",
      "0.41622281074523926 0.875 1.0\n",
      "0.3404272496700287 0.75 0.875\n",
      "1.150435209274292 0.75 0.5\n",
      "0.9899682998657227 0.875 0.625\n",
      "0.5649764537811279 0.75 0.875\n",
      "0.8283661603927612 0.75 0.625\n",
      "1.1151078939437866 0.875 0.75\n",
      "0.38494741916656494 0.875 1.0\n",
      "0.3997853398323059 1.0 0.875\n",
      "2.0848045349121094 0.5 0.5\n",
      "1.6593379974365234 0.5 0.5\n",
      "0.11722342669963837 1.0 1.0\n",
      "0.9647171497344971 0.75 0.75\n",
      "1.160775899887085 0.5 0.5\n",
      "1.6338801383972168 0.5 0.625\n",
      "1.98747980594635 0.25 0.5\n",
      "1.1177045106887817 0.625 0.625\n",
      "0.3926687240600586 1.0 0.875\n",
      "1.1578608751296997 0.625 0.625\n",
      "0.7926927804946899 0.75 0.625\n",
      "0.9068678617477417 0.75 0.75\n",
      "0.6316395998001099 0.875 0.875\n",
      "0.6720249652862549 0.875 0.75\n",
      "0.34128203988075256 1.0 0.75\n",
      "0.19978201389312744 1.0 1.0\n",
      "0.7964417934417725 0.5 1.0\n",
      "0.9132083654403687 0.875 0.625\n",
      "1.7779544591903687 0.625 0.625\n",
      "1.006516695022583 0.625 0.625\n",
      "1.4327751398086548 0.75 0.625\n",
      "1.517176866531372 0.625 0.625\n",
      "1.4056334495544434 0.625 0.5\n",
      "0.5483133792877197 1.0 0.625\n",
      "1.4120278358459473 0.75 0.75\n",
      "0.93526691198349 0.75 0.625\n",
      "0.6271026134490967 0.875 0.75\n",
      "0.32193195819854736 0.875 0.75\n",
      "2.381760835647583 0.5 0.375\n",
      "0.8017128705978394 0.625 0.625\n",
      "0.6271781325340271 0.875 0.875\n",
      "0.7877755761146545 0.625 0.875\n",
      "0.36642971634864807 1.0 0.875\n",
      "0.2791089415550232 1.0 0.875\n",
      "0.8145871162414551 0.75 0.75\n",
      "0.7283250689506531 0.625 0.75\n",
      "1.2419798374176025 0.75 0.75\n",
      "0.7387562394142151 0.75 0.75\n",
      "0.5429032444953918 0.875 0.75\n",
      "1.7444889545440674 0.625 0.75\n",
      "1.2471086978912354 0.75 0.75\n",
      "1.258886694908142 0.625 0.5\n",
      "0.5125139951705933 0.875 1.0\n",
      "0.8661065101623535 0.75 0.75\n",
      "0.44362956285476685 0.875 0.75\n",
      "2.205850601196289 0.5 0.5\n",
      "0.7539923191070557 0.875 0.625\n",
      "1.4903250932693481 0.75 0.625\n",
      "0.6897953152656555 0.75 0.875\n",
      "0.5082127451896667 0.75 0.75\n",
      "1.7197494506835938 0.75 0.5\n",
      "0.6351489424705505 0.75 0.5\n",
      "0.6456205248832703 0.75 0.875\n",
      "1.0691949129104614 0.75 0.75\n",
      "0.27008605003356934 0.875 1.0\n",
      "0.534887433052063 1.0 0.75\n",
      "0.5496298670768738 0.625 0.75\n",
      "0.7313945889472961 1.0 0.5\n",
      "0.8614557981491089 0.75 0.625\n",
      "1.2986308336257935 0.75 0.5\n",
      "1.2532405853271484 0.875 0.75\n",
      "0.6245722770690918 0.75 0.875\n",
      "1.0631275177001953 0.625 0.5\n",
      "1.0319533348083496 0.5 0.5\n",
      "1.5678493976593018 0.5 0.5\n",
      "0.6342051029205322 0.875 0.875\n",
      "0.22791266441345215 1.0 1.0\n",
      "0.6311530470848083 0.75 0.75\n",
      "0.7101776599884033 0.75 0.75\n",
      "0.32785093784332275 0.875 0.875\n",
      "0.5181912779808044 0.875 0.875\n",
      "0.6804248094558716 0.75 0.625\n",
      "0.4074658751487732 0.75 0.875\n",
      "0.5814698934555054 0.75 1.0\n",
      "0.4750541150569916 0.875 0.75\n",
      "1.2787232398986816 0.5 0.75\n",
      "1.0474967956542969 0.625 0.75\n",
      "0.9243881106376648 0.875 0.625\n",
      "1.1082991361618042 0.625 0.625\n",
      "1.382930040359497 0.625 0.625\n",
      "1.0215250253677368 0.875 0.625\n",
      "1.4448223114013672 0.5 0.75\n",
      "0.38348934054374695 0.875 0.875\n",
      "0.8148481845855713 0.625 0.875\n",
      "1.3748970031738281 0.5 0.5\n",
      "0.6438242197036743 0.75 0.875\n",
      "0.925184428691864 0.625 0.75\n",
      "1.8575208187103271 0.75 0.625\n",
      "0.8120332956314087 0.625 0.625\n",
      "0.6397073268890381 0.875 0.875\n",
      "0.7377775311470032 0.875 0.75\n",
      "0.8629159927368164 0.625 0.5\n",
      "1.198962926864624 0.5 0.625\n",
      "1.6270712614059448 0.625 0.5\n",
      "0.5939983129501343 0.75 0.875\n",
      "0.38795408606529236 0.875 0.875\n",
      "1.0263454914093018 0.5 0.375\n",
      "1.0003154277801514 0.75 0.75\n",
      "0.23412257432937622 1.0 0.875\n",
      "1.22637140750885 0.75 0.625\n",
      "0.8873157501220703 0.5 0.75\n",
      "0.8902739882469177 0.75 0.625\n",
      "0.5207392573356628 0.75 0.875\n",
      "1.040622353553772 0.625 0.5\n",
      "0.6036179065704346 0.875 0.875\n",
      "0.694354236125946 0.75 0.875\n",
      "2.1210713386535645 0.5 0.625\n",
      "0.7537846565246582 0.875 0.875\n",
      "0.7525118589401245 0.75 0.75\n",
      "0.8079323768615723 0.875 0.625\n",
      "1.3140250444412231 0.5 0.625\n",
      "0.3969613313674927 0.875 0.875\n",
      "0.37470442056655884 0.875 1.0\n",
      "0.6191565990447998 0.875 0.75\n",
      "0.5677173137664795 0.75 0.875\n",
      "0.5880981683731079 0.75 0.75\n",
      "1.4408113956451416 0.625 0.625\n",
      "0.4476429224014282 0.75 1.0\n",
      "1.3783347606658936 0.375 0.625\n",
      "1.971853494644165 0.5 0.25\n",
      "0.4308238923549652 0.875 0.75\n",
      "1.105179786682129 0.75 0.625\n",
      "0.8120554089546204 0.75 0.75\n",
      "0.7413477897644043 0.5 0.5\n",
      "0.4827894866466522 0.75 1.0\n",
      "0.8161425590515137 0.75 0.875\n",
      "0.7072616815567017 0.75 0.75\n",
      "1.1962571144104004 0.625 0.5\n",
      "0.7067223191261292 0.75 0.75\n",
      "0.7736601829528809 0.75 0.625\n",
      "1.4338150024414062 0.5 0.625\n",
      "0.6592249274253845 0.875 0.875\n",
      "1.0342031717300415 0.75 0.75\n",
      "0.7915680408477783 0.5 0.75\n",
      "0.429057776927948 0.875 0.875\n",
      "0.4232577085494995 0.75 0.875\n",
      "0.699065089225769 0.875 0.625\n",
      "0.7772303819656372 0.875 0.875\n",
      "0.4509164094924927 0.875 1.0\n",
      "0.3138777017593384 0.875 0.875\n",
      "0.7051160335540771 0.75 0.875\n",
      "0.8171902894973755 0.875 0.75\n",
      "1.1243391036987305 0.625 0.625\n",
      "1.3792946338653564 0.75 0.75\n",
      "2.0943338871002197 0.5 0.375\n",
      "0.6171314716339111 0.75 0.75\n",
      "0.7704030275344849 0.875 0.75\n",
      "0.3345796465873718 1.0 0.875\n",
      "0.3787479102611542 0.875 1.0\n",
      "1.346181869506836 0.375 0.625\n",
      "0.0963461697101593 0.875 1.0\n",
      "0.9845156669616699 0.5 0.5\n",
      "0.32610177993774414 0.875 0.875\n",
      "1.754455804824829 0.625 0.625\n",
      "1.5019439458847046 0.5 0.625\n",
      "0.8577879667282104 0.75 0.875\n",
      "1.171875 0.625 0.625\n",
      "2.232982635498047 0.625 0.5\n",
      "0.7535583972930908 0.625 0.875\n",
      "1.0093501806259155 0.625 0.625\n",
      "0.6703333854675293 0.75 0.875\n",
      "0.8550330996513367 0.625 0.75\n",
      "1.025948166847229 0.625 0.75\n",
      "1.2778366804122925 0.625 0.625\n",
      "0.923485517501831 0.75 0.75\n",
      "0.7031375765800476 0.75 0.75\n",
      "0.6021531224250793 0.875 0.875\n",
      "1.6639564037322998 0.625 0.5\n",
      "0.9313726425170898 0.625 0.75\n",
      "0.4647434949874878 1.0 0.75\n",
      "0.6668022871017456 0.625 1.0\n",
      "0.6016152501106262 0.75 0.875\n",
      "1.7909281253814697 0.625 0.625\n",
      "0.6344194412231445 0.875 0.75\n",
      "1.5706323385238647 0.5 0.5\n",
      "0.633423388004303 0.75 0.75\n",
      "0.6057783365249634 0.75 0.875\n",
      "0.7316315174102783 0.875 0.75\n",
      "0.47786903381347656 0.875 0.75\n",
      "0.5455667972564697 0.875 0.75\n",
      "1.2096457481384277 0.75 0.5\n",
      "0.623026967048645 0.875 0.75\n",
      "0.6984008550643921 1.0 0.75\n",
      "1.9985663890838623 0.625 0.875\n",
      "0.485360711812973 0.875 0.75\n",
      "0.6616584062576294 0.875 0.75\n",
      "0.3033868670463562 1.0 1.0\n",
      "0.8904146552085876 0.75 0.625\n",
      "1.300781488418579 0.5 0.625\n",
      "0.40430545806884766 0.875 0.875\n",
      "1.223994493484497 0.75 0.625\n",
      "0.7207773923873901 0.625 0.875\n",
      "0.7004052996635437 0.75 0.875\n",
      "0.8543837070465088 0.625 0.75\n",
      "0.9543416500091553 0.625 0.75\n",
      "0.6068500280380249 0.875 0.75\n",
      "0.8466604948043823 0.875 0.75\n",
      "0.9445115327835083 0.625 0.625\n",
      "1.4309167861938477 0.75 0.25\n",
      "0.811177134513855 0.75 0.75\n",
      "1.2264487743377686 0.625 0.625\n",
      "0.7813247442245483 0.875 0.75\n",
      "1.7230749130249023 0.75 0.75\n",
      "2.191904306411743 0.5 0.5\n",
      "1.1182985305786133 0.875 0.75\n",
      "0.27211064100265503 1.0 0.875\n",
      "0.1516421139240265 1.0 1.0\n",
      "0.43208423256874084 1.0 0.75\n",
      "1.1353951692581177 0.625 0.625\n",
      "1.371546983718872 0.5 0.75\n",
      "1.0480629205703735 0.625 0.75\n",
      "1.333648681640625 0.75 0.625\n",
      "1.3363442420959473 0.625 0.5\n",
      "1.1331963539123535 0.75 0.75\n",
      "0.3402402698993683 0.875 0.875\n",
      "0.6013731956481934 0.75 0.875\n",
      "0.7497023940086365 0.75 0.625\n",
      "0.9141933917999268 0.75 0.75\n",
      "0.26289471983909607 0.875 1.0\n",
      "0.32558244466781616 0.875 1.0\n",
      "1.7042579650878906 0.625 0.375\n",
      "0.5958501100540161 0.875 0.75\n",
      "1.474043369293213 0.75 0.625\n",
      "0.6746078133583069 0.75 0.75\n",
      "2.8999242782592773 0.375 0.5\n",
      "0.36113885045051575 1.0 0.875\n",
      "0.34579142928123474 0.875 1.0\n",
      "0.837847888469696 0.875 0.625\n",
      "1.1306384801864624 0.625 0.75\n",
      "0.28812581300735474 1.0 0.875\n",
      "0.45725658535957336 0.75 1.0\n",
      "0.6777733564376831 0.75 0.75\n",
      "0.49510642886161804 0.75 1.0\n",
      "1.4345247745513916 0.75 0.625\n",
      "0.9945326447486877 0.75 0.5\n",
      "0.37369316816329956 1.0 1.0\n",
      "0.7401301264762878 0.875 0.75\n",
      "1.0620102882385254 0.625 0.75\n",
      "0.9274131059646606 0.875 0.875\n",
      "0.774631142616272 0.75 0.5\n",
      "0.6943029165267944 0.75 0.75\n",
      "0.15483619272708893 1.0 1.0\n",
      "1.3377342224121094 0.75 0.625\n",
      "0.980367124080658 0.75 0.75\n",
      "0.9933608770370483 0.75 0.625\n",
      "1.4045660495758057 0.625 0.75\n",
      "0.9455660581588745 0.75 0.75\n",
      "0.50483238697052 0.875 0.875\n",
      "0.9493111371994019 0.75 0.875\n",
      "1.0134527683258057 0.75 0.75\n",
      "1.3919674158096313 0.75 0.5\n",
      "0.3945654332637787 0.75 0.875\n",
      "0.8244122266769409 0.625 0.75\n",
      "2.1611995697021484 0.375 0.5\n",
      "0.7329951524734497 0.875 0.625\n",
      "1.1089210510253906 0.625 0.625\n",
      "0.3160337209701538 0.875 0.875\n",
      "1.67745041847229 0.75 0.875\n",
      "0.3984473943710327 0.875 0.875\n",
      "1.3208262920379639 0.625 0.625\n",
      "1.0838414430618286 0.625 0.875\n",
      "1.758629560470581 0.625 0.375\n",
      "0.40621787309646606 0.875 1.0\n",
      "2.050902843475342 0.5 0.25\n",
      "0.5242863297462463 1.0 0.875\n",
      "0.9484441876411438 0.75 0.75\n",
      "1.5247299671173096 0.875 0.625\n",
      "1.563882827758789 0.5 0.5\n",
      "1.199925422668457 0.625 0.75\n",
      "0.7472606301307678 0.75 0.75\n",
      "1.1569364070892334 0.625 0.75\n",
      "0.5258967876434326 0.75 0.875\n",
      "1.0351600646972656 0.75 0.5\n",
      "0.2876785397529602 0.875 1.0\n",
      "0.7694951295852661 0.875 0.875\n",
      "0.4728807210922241 0.625 0.875\n",
      "0.8316140174865723 0.75 0.875\n",
      "1.305600881576538 0.625 0.625\n",
      "0.30077528953552246 0.875 1.0\n",
      "2.062537908554077 0.25 0.25\n",
      "1.4117817878723145 0.625 0.75\n",
      "0.3080098628997803 0.875 0.875\n",
      "0.8786324262619019 0.625 0.875\n",
      "0.5536251068115234 0.75 0.875\n",
      "1.1297166347503662 0.75 0.625\n",
      "0.7208133935928345 0.875 0.75\n",
      "2.1291730403900146 0.5 0.5\n",
      "1.8258414268493652 0.625 0.625\n",
      "0.8426791429519653 0.875 0.625\n",
      "0.5897444486618042 1.0 0.75\n",
      "1.2338645458221436 0.625 0.875\n",
      "0.638577938079834 0.75 0.75\n",
      "0.8498411178588867 0.75 0.75\n",
      "0.5998024940490723 1.0 0.5\n",
      "0.9157415628433228 0.625 0.75\n",
      "0.6515621542930603 0.875 0.875\n",
      "1.0573325157165527 0.625 0.75\n",
      "0.9282474517822266 0.75 0.75\n",
      "0.6408676505088806 0.75 0.75\n",
      "0.6836428642272949 0.75 0.75\n",
      "0.519158124923706 0.875 1.0\n",
      "0.8229563236236572 0.875 0.5\n",
      "1.2629361152648926 0.5 0.5\n",
      "0.7200562953948975 0.875 0.625\n",
      "1.375652551651001 0.625 0.5\n",
      "1.4124908447265625 0.75 0.5\n",
      "1.0368187427520752 0.625 0.75\n",
      "0.8900293111801147 0.5 0.625\n",
      "0.31388428807258606 1.0 0.875\n",
      "0.7116293907165527 0.75 0.625\n",
      "0.7150341272354126 0.75 0.625\n",
      "0.5567255020141602 0.875 0.875\n",
      "0.3215648829936981 0.875 0.75\n",
      "0.12766674160957336 1.0 1.0\n",
      "0.5669645071029663 0.75 0.875\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "BertForQuestionAnswering(\n",
       "  (bert): BertModel(\n",
       "    (embeddings): BertEmbeddings(\n",
       "      (word_embeddings): Embedding(21128, 768, padding_idx=0)\n",
       "      (position_embeddings): Embedding(512, 768)\n",
       "      (token_type_embeddings): Embedding(2, 768)\n",
       "      (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "      (dropout): Dropout(p=0.1, inplace=False)\n",
       "    )\n",
       "    (encoder): BertEncoder(\n",
       "      (layer): ModuleList(\n",
       "        (0): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (1): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (2): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (3): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (4): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (5): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (6): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (7): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (8): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (9): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (10): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "        (11): BertLayer(\n",
       "          (attention): BertAttention(\n",
       "            (self): BertSelfAttention(\n",
       "              (query): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (key): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (value): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "            (output): BertSelfOutput(\n",
       "              (dense): Linear(in_features=768, out_features=768, bias=True)\n",
       "              (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "              (dropout): Dropout(p=0.1, inplace=False)\n",
       "            )\n",
       "          )\n",
       "          (intermediate): BertIntermediate(\n",
       "            (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
       "          )\n",
       "          (output): BertOutput(\n",
       "            (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
       "            (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
       "            (dropout): Dropout(p=0.1, inplace=False)\n",
       "          )\n",
       "        )\n",
       "      )\n",
       "    )\n",
       "  )\n",
       "  (qa_outputs): Linear(in_features=768, out_features=2, bias=True)\n",
       ")"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from torch.utils.data import DataLoader\n",
    "from transformers import AdamW\n",
    "\n",
    "device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')\n",
    "\n",
    "model.to(device)\n",
    "model.train()\n",
    "\n",
    "train_loader = DataLoader(train_dataset, batch_size=8, shuffle=True)\n",
    "\n",
    "optim = AdamW(model.parameters(), lr=5e-5)\n",
    "\n",
    "for epoch in range(3):\n",
    "    for idx, batch in enumerate(train_loader):\n",
    "        optim.zero_grad()\n",
    "        input_ids = batch['input_ids'].to(device)\n",
    "        attention_mask = batch['attention_mask'].to(device)\n",
    "        start_positions = batch['start_positions'].to(device)\n",
    "        end_positions = batch['end_positions'].to(device)\n",
    "        outputs = model(input_ids, attention_mask=attention_mask, start_positions=start_positions, end_positions=end_positions)\n",
    "        loss = outputs[0]\n",
    "        loss.backward()\n",
    "        optim.step()\n",
    "\n",
    "        start_pred = torch.argmax(outputs['start_logits'], dim=1)\n",
    "        end_pred = torch.argmax(outputs['end_logits'], dim=1)\n",
    "        acc1 = ( (start_pred == start_positions).sum() / len(start_pred) ).item()\n",
    "        acc2 = ( (end_pred == end_positions).sum() / len(start_pred) ).item()\n",
    "\n",
    "        if idx % 10 == 0:\n",
    "            print(loss.item(), acc1, acc2)\n",
    "            with codecs.open('log.log', 'a') as up:\n",
    "                up.write('{3}\\t{0}\\t{1}\\t{2}\\n'.format(loss.item(), acc1, acc2, \n",
    "                                                       str(epoch) + '/' + str(idx) +'/'+ str(len(train_loader))))\n",
    "\n",
    "model.eval()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:22:12.663490Z",
     "iopub.status.busy": "2021-05-14T01:22:12.662907Z",
     "iopub.status.idle": "2021-05-14T01:22:18.655186Z",
     "shell.execute_reply": "2021-05-14T01:22:18.654025Z",
     "shell.execute_reply.started": "2021-05-14T01:22:12.663432Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Persist the fine-tuned model as a whole-module pickle; loading it back\n",
     "# requires the same class definitions (transformers) to be importable.\n",
     "torch.save(model, 'bert-qa.pt')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:22:22.387285Z",
     "iopub.status.busy": "2021-05-14T01:22:22.386744Z",
     "iopub.status.idle": "2021-05-14T01:22:33.320088Z",
     "shell.execute_reply": "2021-05-14T01:22:33.318834Z",
     "shell.execute_reply.started": "2021-05-14T01:22:22.387235Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "test_df = pd.read_csv('测试 第 1 轮/test1-utf8.csv')\n",
    "test_df['filename'] = 'input/训练集/image/' + df['filename'] # 改为本地路径\n",
    "\n",
    "test_text = [read_ocr(x) for x in test_df.iloc[:].iterrows()]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:22:53.858988Z",
     "iopub.status.busy": "2021-05-14T01:22:53.858425Z",
     "iopub.status.idle": "2021-05-14T01:22:53.865218Z",
     "shell.execute_reply": "2021-05-14T01:22:53.864582Z",
     "shell.execute_reply.started": "2021-05-14T01:22:53.858940Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
     "# Attach the OCR'd text to the test frame (list order follows the iterrows\n",
     "# order used to build test_text above).\n",
     "test_df['text'] = test_text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:22:54.648114Z",
     "iopub.status.busy": "2021-05-14T01:22:54.647585Z",
     "iopub.status.idle": "2021-05-14T01:22:54.659389Z",
     "shell.execute_reply": "2021-05-14T01:22:54.658775Z",
     "shell.execute_reply.started": "2021-05-14T01:22:54.648068Z"
    },
    "tags": []
   },
   "outputs": [],
   "source": [
    "def predcit(doc, query):\n",
    "    item = tokenizer([doc, query], max_length=512, return_tensors='pt', truncation=True, padding=True,)\n",
    "    with torch.no_grad():\n",
    "        input_ids = torch.tensor(item['input_ids']).to(device).reshape(1, -1)\n",
    "        attention_mask = torch.tensor(item['attention_mask']).to(device).reshape(1, -1)\n",
    "        \n",
    "        outputs = model(input_ids[:, :512], attention_mask[:, :512])\n",
    "        start_pred = torch.argmax(outputs['start_logits'], dim=1)\n",
    "        end_pred = torch.argmax(outputs['end_logits'], dim=1)\n",
    "    \n",
    "    try:\n",
    "        start_pred = item.token_to_chars(0, start_pred)\n",
    "        end_pred = item.token_to_chars(0, end_pred)\n",
    "    except:\n",
    "        return ''\n",
    "    \n",
    "    if start_pred.start > end_pred.end:\n",
    "        return ''\n",
    "    else:\n",
    "        return doc[start_pred.start:end_pred.end]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:22:55.688467Z",
     "iopub.status.busy": "2021-05-14T01:22:55.687953Z",
     "iopub.status.idle": "2021-05-14T01:22:55.754408Z",
     "shell.execute_reply": "2021-05-14T01:22:55.753846Z",
     "shell.execute_reply.started": "2021-05-14T01:22:55.688421Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:4: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  after removing the cwd from sys.path.\n",
      "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:5: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  \"\"\"\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "'4L'"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity-check the fine-tuned model on one training example\n",
     "# (its labeled answer_text is '4L'; the prediction should match).\n",
     "predcit(train_df['text'].iloc[25], train_df['question_text'].iloc[25])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:23:12.933650Z",
     "iopub.status.busy": "2021-05-14T01:23:12.933092Z",
     "iopub.status.idle": "2021-05-14T01:23:12.941302Z",
     "shell.execute_reply": "2021-05-14T01:23:12.940671Z",
     "shell.execute_reply.started": "2021-05-14T01:23:12.933604Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "index                                                      (53, 2)\n",
       "question_id                                                 Q00049\n",
       "filename                      input/训练集/image/61fd813500f821f8.jpg\n",
       "question_text                                            产品的容量有多大？\n",
       "answer_text                                                     4L\n",
       "text             产品参数产品名称：苏泊尔（SUPOR产品型号：SF40FC663额定电压：220V额定频率：...\n",
       "index_start                                                     53\n",
       "index_end                                                       55\n",
       "Name: 48, dtype: object"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Inspect the training row used for the sanity check above, to compare\n",
     "# its answer_text against the model's prediction.\n",
     "train_df.iloc[25]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:23:15.348652Z",
     "iopub.status.busy": "2021-05-14T01:23:15.348118Z",
     "iopub.status.idle": "2021-05-14T01:26:27.520796Z",
     "shell.execute_reply": "2021-05-14T01:26:27.520248Z",
     "shell.execute_reply.started": "2021-05-14T01:23:15.348604Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "0it [00:00, ?it/s]/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:4: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  after removing the cwd from sys.path.\n",
      "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:5: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n",
      "  \"\"\"\n",
      "8615it [03:12, 44.84it/s]\n"
     ]
    }
   ],
   "source": [
    "import json\n",
    "import codecs\n",
    "from tqdm import tqdm\n",
    "\n",
    "dw_label = []\n",
    "for row in tqdm(test_df.iloc[:].iterrows()):\n",
    "    dw_label.append({'questionId': row[1].question_id, 'answer': predcit(row[1].text, row[1].question_text)})\n",
    "    \n",
    "import json\n",
    "with open('answer.json', 'w') as up:\n",
    "    json.dump(dw_label, up)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "execution": {
     "iopub.execute_input": "2021-05-14T01:26:40.683535Z",
     "iopub.status.busy": "2021-05-14T01:26:40.682998Z",
     "iopub.status.idle": "2021-05-14T01:26:40.973888Z",
     "shell.execute_reply": "2021-05-14T01:26:40.972362Z",
     "shell.execute_reply.started": "2021-05-14T01:26:40.683488Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "updating: answer.json (deflated 90%)\n"
     ]
    }
   ],
   "source": [
     "# Package the submission file for upload.\n",
     "!zip -r answer.zip answer.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
