{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 利用 fastText 进行文本分类"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 导入所需要的软件包 "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 54,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd \n",
    "import os\n",
    "from fasttext import train_supervised,load_model,train_unsupervised\n",
    "from sklearn.model_selection import train_test_split"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true
   },
   "source": [
    "## 变量设置"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "data_path = \"data/\"\n",
    "train_file = data_path+'train_set_demo.csv'\n",
    "dev_file = data_path+\"dev_set_demo.csv\"\n",
    "test_file = data_path+'test_set_demo.csv'\n",
    "# train_demo_file = data_path+'train_set_demo.csv'\n",
    "train_fasttext_file = data_path+'train_set_fasttext.csv'\n",
    "model_path = \"model/\"\n",
    "# epoch=10\n",
    "# lr=0.1\n",
    "# model_file =  model_path+\"model_fasttext%.2f_epoch%d.ftz\"%(lr, epoch)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true
   },
   "source": [
    "## 数据加载"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(   label                                               text\n",
       " 0      2  2967 6758 339 2021 1854 3731 4109 3792 4149 15...\n",
       " 1     11  4464 486 6352 5619 2465 4802 1452 3137 5778 54...\n",
       " 2      3  7346 4068 5074 3747 5681 6093 1777 2226 7354 6...\n",
       " 3      2  7159 948 4866 2109 5520 2490 211 3956 5520 549...\n",
       " 4      3  3646 3055 3055 2490 4659 6065 3370 5814 2465 5..., 1000)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_train = pd.read_csv(train_file,encoding='utf-8',sep='\\t')\n",
    "df_train.head(),len(df_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "# 随机抽取20%的测试集\n",
    "df_train = df_train.loc[0:len(df_train)*0.8]\n",
    "df_dev = df_train.loc[len(df_train)*0.8:]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "df_dev = pd.read_csv(dev_file,encoding='utf-8',sep='\\t')\n",
    "df_test = pd.read_csv(test_file,encoding='utf-8',sep='\\t')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true
   },
   "source": [
    "## 数据预处理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>label</th>\n",
       "      <th>text</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>__label____label__2</td>\n",
       "      <td>2967 6758 339 2021 1854 3731 4109 3792 4149 15...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>__label____label__11</td>\n",
       "      <td>4464 486 6352 5619 2465 4802 1452 3137 5778 54...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>__label____label__3</td>\n",
       "      <td>7346 4068 5074 3747 5681 6093 1777 2226 7354 6...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>__label____label__2</td>\n",
       "      <td>7159 948 4866 2109 5520 2490 211 3956 5520 549...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>__label____label__3</td>\n",
       "      <td>3646 3055 3055 2490 4659 6065 3370 5814 2465 5...</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "                  label                                               text\n",
       "0   __label____label__2  2967 6758 339 2021 1854 3731 4109 3792 4149 15...\n",
       "1  __label____label__11  4464 486 6352 5619 2465 4802 1452 3137 5778 54...\n",
       "2   __label____label__3  7346 4068 5074 3747 5681 6093 1777 2226 7354 6...\n",
       "3   __label____label__2  7159 948 4866 2109 5520 2490 211 3956 5520 549...\n",
       "4   __label____label__3  3646 3055 3055 2490 4659 6065 3370 5814 2465 5..."
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df_train['label'] = df_train['label'].apply(lambda x: '__label__'+str(x))\n",
    "df_train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "801"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Sanity check: row count of the training split.\n",
     "len(df_train)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true
   },
   "source": [
    "## 数据保存"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "df_train.to_csv(train_fasttext_file,encoding='utf-8',sep='\\t',index=None,header=0)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true
   },
   "source": [
    "## fastText "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "hidden": true
   },
   "source": [
    "### fastText  训练"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "hidden": true
   },
   "source": [
    "- 效果对比\n",
    "    - test_a_sample_submit_ft_9207\n",
    "        - 参数：\n",
    "            - wordNgrams ： 2\n",
    "            - minCount : 1\n",
    "            - lr: 0.1\n",
    "            - epoch : 10\n",
    "\n",
    "\n",
    "\n",
    "def train_supervised(input, lr=0.1, dim=100, \n",
    "                   ws=5, epoch=5, minCount=1, \n",
    "                   minCountLabel=0, minn=0, \n",
    "                   maxn=0, neg=5, wordNgrams=1, \n",
    "                   loss=\"softmax\", bucket=2000000, \n",
    "                   thread=12, lrUpdateRate=100,\n",
    "                   t=1e-4, label=\"__label__\", \n",
    "                   verbose=2, pretrainedVectors=\"\"):\n",
    "  \"\"\"\n",
    "  训练一个监督模型, 返回一个模型对象\n",
    "\n",
    "  @param input: 训练数据文件路径\n",
    "  @param lr:              学习率\n",
    "  @param dim:             向量维度\n",
    "  @param ws:              cbow模型时使用\n",
    "  @param epoch:           次数\n",
    "  @param minCount:        词频阈值, 小于该值在初始化时会过滤掉\n",
    "  @param minCountLabel:   类别阈值，类别小于该值初始化时会过滤掉\n",
    "  @param minn:            构造subword时最小char个数\n",
    "  @param maxn:            构造subword时最大char个数\n",
    "  @param neg:             负采样\n",
    "  @param wordNgrams:      n-gram个数\n",
    "  @param loss:            损失函数类型, softmax, ns: 负采样, hs: 分层softmax\n",
    "  @param bucket:          词扩充大小, [A, B]: A语料中包含的词向量, B不在语料中的词向量\n",
    "  @param thread:          线程个数, 每个线程处理输入数据的一段, 0号线程负责loss输出\n",
    "  @param lrUpdateRate:    学习率更新\n",
    "  @param t:               负采样阈值\n",
    "  @param label:           类别前缀\n",
     "  @param verbose:         日志详细程度 (verbosity level)\n",
    "  @param pretrainedVectors: 预训练的词向量文件路径, 如果word出现在文件夹中初始化不再随机\n",
    "  @return model object\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "lr=0.05\n",
    "epoch= 50\n",
    "model_file =  model_path+\"model_fasttext%.2f_epoch%d.ftz\"%(lr, epoch)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "model = train_supervised(input=train_fasttext_file, epoch=50, lr=0.1, wordNgrams=2, minCount=1, loss=\"softmax\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true,
    "hidden": true
   },
   "source": [
    "### 模型保存"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
     "# Persist the trained classifier to disk.\n",
     "model.save_model(model_file)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true,
    "hidden": true
   },
   "source": [
    "### 模型加载"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n"
     ]
    }
   ],
   "source": [
     "# Reload the classifier from disk (also verifies the saved file is usable).\n",
     "model = load_model(model_file)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true,
    "hidden": true
   },
   "source": [
    "### 模型测试"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "dev_pred_list = []\n",
    "dev_proba_list = []\n",
    "for i in range(len(df_dev['text'])):\n",
    "    lables, proba = model.predict(list(df_dev['text'])[i])\n",
    "    dev_pred_list.append(int(lables[0].split(\"__\")[-1]))\n",
    "    dev_proba_list.append(proba)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "from sklearn.metrics import f1_score, precision_score, recall_score\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.metrics import classification_report"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
     "# Ground-truth labels vs. fastText predictions for the dev split.\n",
     "y_dev = list(df_dev['label'])\n",
     "y_pre_dev = dev_pred_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(1000, 1000)"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "len(y_dev),len(y_pre_dev)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {
    "hidden": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "#################### FastText ##################\n",
      "accuracy_score:0.206\n",
      "precision:0.014714285714285713\n",
      "recall:0.07142857142857142\n",
      "2*(precision*recall)/(precision + recall):0.024401800521203505\n",
      "macro fmeasure1:0.024401800521203505\n",
      "micro fmeasure2:0.206\n",
      "              precision    recall  f1-score   support\n",
      "\n",
      "           0       0.00      0.00      0.00       181\n",
      "           1       0.21      1.00      0.34       206\n",
      "           2       0.00      0.00      0.00       152\n",
      "           3       0.00      0.00      0.00       113\n",
      "           4       0.00      0.00      0.00        81\n",
      "           5       0.00      0.00      0.00        61\n",
      "           6       0.00      0.00      0.00        43\n",
      "           7       0.00      0.00      0.00        43\n",
      "           8       0.00      0.00      0.00        44\n",
      "           9       0.00      0.00      0.00        23\n",
      "          10       0.00      0.00      0.00        27\n",
      "          11       0.00      0.00      0.00        14\n",
      "          12       0.00      0.00      0.00         6\n",
      "          13       0.00      0.00      0.00         6\n",
      "\n",
      "    accuracy                           0.21      1000\n",
      "   macro avg       0.01      0.07      0.02      1000\n",
      "weighted avg       0.04      0.21      0.07      1000\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\progrom\\python\\python\\python3\\lib\\site-packages\\sklearn\\metrics\\_classification.py:1272: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, msg_start, len(result))\n",
      "D:\\progrom\\python\\python\\python3\\lib\\site-packages\\sklearn\\metrics\\_classification.py:1272: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, msg_start, len(result))\n"
     ]
    }
   ],
   "source": [
    "print(\"#################### FastText ##################\")\n",
    "precision = precision_score(y_dev, y_pre_dev, average=\"macro\")\n",
    "recall = recall_score(y_dev, y_pre_dev, average=\"macro\")\n",
    "fmeasure1 = f1_score(y_dev, y_pre_dev, average=\"macro\")\n",
    "fmeasure2 = f1_score(y_dev, y_pre_dev, average=\"micro\")\n",
    "print(f\"accuracy_score:{accuracy_score(y_dev, y_pre_dev)}\")  \n",
    "print(f\"precision:{precision}\")\n",
    "print(f\"recall:{recall}\")\n",
    "print(f\"2*(precision*recall)/(precision + recall):{2*(precision*recall)/(precision + recall)}\")\n",
    "print(f\"macro fmeasure1:{fmeasure1}\")\n",
    "print(f\"micro fmeasure2:{fmeasure2}\")\n",
    "print(classification_report(y_true=y_dev, y_pred=y_pre_dev))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "heading_collapsed": true,
    "hidden": true
   },
   "source": [
    "### 模型预测"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "pred_list = []\n",
    "proba_list = []\n",
    "for i in range(len(df_test['text'])):\n",
    "    lables, proba = model.predict(list(df_test['text'])[i])\n",
    "    pred_list.append(int(lables[0].split(\"__\")[-1]))\n",
    "    proba_list.append(proba)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "test_df = pd.DataFrame()\n",
    "test_df['label'] = pred_list\n",
    "test_df['proba'] = proba_list"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {
    "hidden": true
   },
   "outputs": [],
   "source": [
     "# Save predictions (with confidence scores) for later inspection/ensembling.\n",
     "test_df.to_csv(\"data/fasttext_with_score_test.csv\",encoding=\"utf-8\",index=None)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## fasttext 训练词向量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "['3750', '648', '900', '3370', '6122', '4464', '7399', '4939', '3659', '4811', '5598', '669', '2465', '2400', '5560', '299', '2109', '4893', '4411', '1699', '1519', '803', '1635', '6065', '5998', '1903', '5445', '1324', '2376', '340', '4659', '3800', '5948', '1460', '1633', '1985', '6017', '2614', '1465', '3961', '4853', '5393', '2210', '6250', '5602', '3700', '5977', '2799', '4646', '7539', '4516', '2252', '7543', '151', '6357', '619', '3915', '5410', '7495', '913', '6093', '5620', '4128', '5780', '5915', '7194', '4149', '3223', '2073', '307', '1375', '2490', '2662', '5491', '4490', '5296', '5036', '4269', '2106', '5858', '5330', '3686', '1394', '1667', '6045', '465', '3099', '5176', '5430', '2539', '1866', '25', '4559', '6038', '4704', '192', '1141', '1080', '6248', '3272', '5028', '1567', '6831', '4190', '6104', '3618', '3374', '2859', '7010', '1702', '3166', '2119', '3530', '3605', '4525', '6407', '4822', '1099', '6644', '5659', '7186', '3772', '5510', '5589', '531', '6637', '1407', '5619', '2986', '4480', '3641', '4063', '1920', '1722', '1906', '2515', '23', '5677', '910', '5689', '5778', '5736', '4124', '5011', '4167', '5310', '1070', '1767', '2212', '7377', '5612', '7509', '2595', '1018', '1736', '6835', '3263', '281', '3809', '5537', '1363', '1277', '3220', '3893', '3743', '4958', '2555', '2541', '5566', '6887', '1871', '742', '2289', '2107', '2265', '4969', '3300', '2597', '6040', '6242', '5949', '4151', '4409', '5057', '512', '1066', '486', '3870', '1854', '5096', '4562', '2313', '7467', '7261', '7058', '2828', '7492', '62', '5470', '3747', '7346', '4109', '6508', '5051', '2693', '2112', '7044', '5526', '7239', '5702', '3068', '</s>', '6832', '3661', '6810', '4392', '1731', '7123', '2364', '2489', '2380', '1679', '5264', '3694', '4211', '6734', '3859', '6333', '6293', '1734', '6929', '6630', '6980', '6983', '4936', '6759', '3560', '3568', '2729', '6286', '1215', '1344', '1815', '4576', '6656', '7305', '1401', '1913', '4721', '5530', '7444', '7055', 
'1031', '2770', '761', '5520', '5284', '2304', '4231', '3792', '2538', '3634', '150', '2549', '2461', '6501', '1647', '6641', '6101', '4630', '3630', '5037', '2975', '2087', '5271', '2967', '1877', '7370', '3193', '2471', '1036', '4498', '4230', '2410', '4139', '3038', '4655', '1859', '3706', '2827', '3196', '2621', '5640', '1362', '730', '4148', '3154', '4301', '2197', '6301', '3433', '4293', '1779', '1348', '3000', '3586', '450', '7039', '1279', '6453', '6289', '3335', '6609', '606', '3780', '6206', '2147', '6770', '3864', '5718', '544', '1334', '1726', '4068', '623', '4967', '5122', '2466', '2446', '7255', '2058', '4333', '4216', '2923', '1641', '2717', '6469', '2674', '6713', '4261', '6405', '6714', '4933', '5282', '1264', '4350', '955', '6822', '2810', '5450', '2456', '1271', '5328', '3578', '1816', '2402', '4499', '3440', '3523', '3317', '4462', '6920', '4412', '7309', '885', '6811', '2786', '290', '433', '4671', '5192', '6909', '5681', '2115', '5538', '7328', '7449', '7160', '922', '1844', '5724', '3692', '478', '2791', '19', '408', '134', '3397', '264', '6485', '3067', '3912', '6654', '2444', '670', '541', '5787', '7403', '94', '7349', '5041', '4599', '1241', '6178', '2891', '7212', '5338', '893', '656', '3976', '4046', '4326', '3971', '868', '4902', '350', '872', '4180', '3018', '4321', '6012', '4181', '133', '1610', '2192', '1952', '4089', '4173', '7159', '4417', '4298', '2463', '4233', '2151', '535', '2154', '5139', '6227', '4117', '6521', '2685', '4744', '6352', '1511', '3607', '2899', '4407', '6666', '1605', '2435', '671', '2226', '2695', '1582', '7400', '5397', '4531', '3585', '3242', '5178', '4105', '1580', '1258', '1219', '6740', '4125', '2974', '4923', '2282', '5864', '5505', '790', '7256', '2042', '7486', '4866', '6966', '1695', '2522', '3695', '6569', '2477', '5165', '4080', '7154', '5547', '6050', '4612', '5955', '4981', '6919', '4998', '6088', '296', '6861', '3504', '4469', '5889', '3329', '5906', '5519', '1571', '2029', '4650', '116', '965', 
'383', '2612', '5971', '4510', '4430', '3461', '1970', '6833', '2021', '4145', '3765', '3648', '3203', '4568', '3137', '265', '4741', '826', '64', '5938', '6518', '6846', '5006', '4396', '1934', '4355', '2993', '2396', '5292', '5498', '3231', '3373', '7363', '2121', '2315', '7408', '4166', '330', '3901', '3508', '314', '343', '5698', '6722', '1660', '2848', '6319', '3106', '4291', '6143', '4780', '1457', '847', '6886', '2448', '1999', '5744', '1146', '6689', '248', '5497', '532', '2205', '5708', '5099', '2004', '736', '2506', '1227', '137', '3117', '5235', '1323', '6014', '5810', '2990', '3764', '5999', '5166', '7037', '980', '6350', '3770', '5788', '4909', '3056', '5920', '1116', '2970', '3464', '920', '7528', '751', '3342', '4986', '7420', '6535', '5816', '507', '4653', '3500', '3012', '3945', '1283', '7465', '1592', '4802', '3007', '5926', '794', '663', '3824', '1308', '2688', '1395', '3937', '6182', '2255', '2334', '3310', '3289', '1919', '1706', '6596', '414', '7047', '1335', '6160', '7257', '5226', '88', '4553', '584', '419', '810', '565', '4381', '7013', '1613', '5988', '4351', '5521', '5105', '3495', '1778', '4751', '5370', '2984', '4114', '6043', '517', '7354', '6798', '974', '4042', '4819', '1043', '6854', '1951', '591', '5413', '827', '600', '4760', '3019', '5221', '6902', '7327', '7006', '2499', '101', '4399', '1684', '5909', '6768', '1924', '2851', '6220', '3366', '6678', '4237', '4450', '1245', '1405', '2592', '764', '6223', '6284', '3731', '3255', '2738', '127', '7251', '6985', '7436', '5803', '3051', '1724', '6235', '1351', '641', '1688', '2968', '4053', '5251', '2348', '2007', '5562', '5814', '4369', '4315', '138', '1214', '3477', '2716', '2331', '3725', '5492', '2835', '4583', '4677', '3084', '4603', '3644', '7023', '220', '3017', '3646', '1735', '2570', '4768', '1315', '6930', '1132', '7366', '2378', '5063', '5984', '2918', '1899', '3396', '3481', '4302', '2528', '144', '5081', '3819', '6165', '2218', '6962', '7134', '1152', '5957', '5791', 
'7127', '1629', '983', '5688', '211', '5179', '6602', '3186', '53', '1697', '6751', '3283', '2983', '4220', '2838', '293', '6651', '6163', '304', '1255', '2769', '5801', '5839', '5117', '6973', '3134', '2316', '6176', '4040', '5693', '3227', '6515', '979', '6977', '462', '7077', '3377', '6567', '936', '2407', '1170', '6725', '4543', '7091', '3097', '543', '1379', '3247', '3364', '1914', '1879', '734', '7532', '6027', '7421', '6308', '6662', '3128', '1103', '26', '902', '569', '3956', '4542', '7042', '5095', '3987', '56', '6899', '4036', '6015', '6695', '6760', '1730', '4858', '7373', '4595', '6560', '2230', '3501', '5881', '873', '2367', '3844', '1889', '6115', '316', '6552', '6007', '6003', '7490', '3529', '2722', '495', '5389', '7147', '4202', '2328', '894', '2131', '812', '6583', '4786', '2602', '5854', '2229', '5860', '5775', '6314', '3129', '7019', '3531', '4287', '6613', '4454', '404', '4458', '4779', '3938', '2505', '1805', '3654', '4329', '4340', '4558', '499', '6908', '5997', '38', '442', '5511', '3775', '5603', '578', '69', '4372', '1991', '3032', '197', '5623', '5005', '5288', '6482', '1247', '5660', '6663', '7032', '4648', '431', '1061', '3130', '1327', '1292', '1299', '4021', '7326', '5466', '1388', '7344', '4377', '7292', '7206', '5168', '2076', '2318', '3598', '2283', '212', '3613', '3615', '1168', '1623', '1252', '3665', '5396', '5882', '245', '2399', '4163', '2099', '7330', '6968', '6167', '7125', '5617', '3860', '2211', '3949', '3198', '368', '4636', '6972', '5741', '6639', '1267', '7219', '6729', '5243', '5486', '4223', '886', '2708', '1757', '6588', '5239', '3691', '2935', '6615', '4403', '1590', '3270', '4547', '6362', '1693', '2699', '6940', '1939', '3456', '4921', '3299', '3873', '5381', '1622', '6549', '3090', '2683', '4130', '1960', '3120', '5298', '5385', '4205', '5436', '6047', '6543', '6004', '83', '3107', '4183', '3469', '36', '948', '443', '3515', '6631', '2397', '3226', '7507', '5335', '5495', '7458', '4876', '4354', '3608', '869', 
'4679', '3015', '318', '5604', '3762', '7445', '7395', '6551', '2023', '1205', '2491', '3771', '1232', '1154', '5573', '7371', '1891', '57', '3994', '1946', '930', '4003', '5883', '7078', '2873', '5395', '1987', '6986', '2741', '657', '2776', '2642', '1846', '659', '1819', '1274', '4305', '355', '6875', '219', '3606', '4842', '1148', '1250', '3782', '1616', '5367', '2434', '4643', '7029', '141', '7157', '7180', '5422', '1793', '957', '2495', '5305', '677', '6674', '1936', '3418', '7025', '2484', '4203', '1670', '3070', '5606', '3484', '4544', '5594', '2181', '5089', '7121', '6193', '6192', '2663', '4894', '4120', '5480', '5936', '5678', '6890', '6066', '6758', '6587', '1953', '6888', '3066', '1715', '7250', '652', '7254', '4413', '6390', '1129', '729', '7117', '1006', '6063', '3148', '3577', '6649', '2847', '5822', '7393', '7533', '7033', '4641', '4118', '3021', '6149', '7059', '5194', '3088', '1370', '5658', '2502', '4587', '2745', '1397', '361', '3442', '5935', '4214', '1814', '5482', '3155', '580', '6671', '632', '5731', '29', '349', '3783', '2060', '3811', '6393', '3436', '1121', '3040', '4378', '2145', '6959', '4691', '781', '2919', '2727', '1972', '463', '932', '78', '2795', '3062', '7546', '6046', '1746', '3466', '6221', '5647', '583', '763', '4491', '7329', '5821', '6905', '6917', '5109', '3151', '470', '6417', '7311', '3213', '5278', '6625', '2124', '4270', '1518', '7419', '2594', '1361', '5493', '2012', '5649', '5484', '2766', '2512', '5169', '3172', '6243', '4486', '6881', '6444', '5472', '3159', '2221', '5229', '1110', '993', '3744', '3479', '445', '1750', '1698', '3703', '3055', '3104', '2411', '216', '1743', '1386', '2655', '1782', '7137', '7038', '2080', '1078', '5705', '382', '177', '2196', '4628', '5426', '3400', '3720', '6704', '5610', '1500', '5535', '405', '341', '5203', '5574', '4199', '1569', '1302', '5631', '2936', '3039', '5555', '5322', '5402', '2130', '2028', '6981', '2035', '4122', '3354', '2610', '1744', '3112', '7110', '6898', '3168', 
'5202', '2646', '744"
     ]
    },
    {
     "data": {
      "text/html": [
       "<b>limit_output extension: Maximum message size of 10000 exceeded with 48297 characters</b>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "skipgram_model = train_unsupervised(\"D:/project/python_wp/nlp/team-learning-nlp/NewsTextClassification/data/all.csv\",model='skipgram')\n",
    "print(skipgram_model.words) # list of words in dictionary"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Persist the skip-gram embedding model in fastText binary format.\n",
     "skipgram_model.save_model(\"../vec/skipgram_model.bin\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 转换 fasttext 词向量 数据格式 (bin->vector)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Embedding models (<name>.bin) under vec_path to convert to .vector text.\n",
     "vec_path = \"../vec/\"\n",
     "model_name_list = [\n",
     "    \"skipgram_tfidf\",\n",
     "    \"skipgram_re\",\n",
     "    \"skipgram\"\n",
     "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(skipgram_model.get_words()):6152\n",
      "len(skipgram_model.get_output_matrix()):6152\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Warning : `load_model` does not return WordVectorModel or SupervisedModel any more, but a `FastText` object which is very similar.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(skipgram_model.get_words()):6152\n",
      "len(skipgram_model.get_output_matrix()):6152\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Warning : `load_model` does not return WordVectorModel or SupervisedModel any more, but a `FastText` object which is very similar.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "len(skipgram_model.get_words()):6152\n",
      "len(skipgram_model.get_output_matrix()):6152\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Warning : `load_model` does not return WordVectorModel or SupervisedModel any more, but a `FastText` object which is very similar.\n"
     ]
    }
   ],
   "source": [
    "for model_name in model_name_list:\n",
    "    print(f\"len(skipgram_model.get_words()):{len(skipgram_model.get_words())}\")\n",
    "    print(f\"len(skipgram_model.get_output_matrix()):{len(skipgram_model.get_output_matrix())}\")\n",
    "    model = load_model(f\"{vec_path}{model_name}.bin\")\n",
    "    fo = open(f\"{vec_path}{model_name}.vector\", \"w\")\n",
    "    num = 0\n",
    "    words = model.get_words()\n",
    "    vectors = model.get_output_matrix()\n",
    "    for word,vec in zip(words,vectors):\n",
    "        if num==0:\n",
    "            fo.write(f\"{len(words)} {len(vec)}\\n\")\n",
    "        vec = ' '.join([str(v) for v in list(vec)])\n",
    "        fo.write(f\"{str(word)} {vec}\\n\")\n",
    "        num = num+1\n",
    "    fo.close()\n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "6152"
      ]
     },
     "execution_count": 40,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "6152"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "-0.28017086 0.11350695 -0.04262782 -0.04602467 -0.09189865 0.05667208 0.10785393 0.13750735 0.012261553 -0.10165975 -0.18224685 0.23385282 0.080661416 -0.18840082 0.15847741 -0.21571872 -0.17095001 -0.21586788 0.028861133 -0.17697865 0.022861684 0.047177915 0.05555938 0.19529851 -0.18746503 0.10704636 -0.06867341 0.16720164 0.021176228 -0.3204781 -0.008164253 0.18224248 -0.11043159 -0.23564485 -0.316806 0.15005909 -0.21871711 0.072890386 -0.11560779 -0.0733482 0.004734337 0.1710536 -0.13737045 0.039193146 0.086451344 0.12931097 -0.086180866 0.06970388 -0.07609641 0.056510027 0.17541671 -0.38054082 -0.34623057 -0.3520798 0.006494347 0.11706478 0.3999753 -0.12466619 0.30152428 0.06916078 -0.007933292 -0.20891497 0.029494235 -0.028727993 -0.045374274 0.07576568 0.02458046 0.2593493 -0.2884896 0.1259758 -0.059084617 0.13245226 -0.18634497 -0.09137347 -0.094235785 -0.035731245 0.17370886 0.05358243 0.24141954 0.13658875 -0.14156654 -0.2423847 -0.2015774 0.060784955 0.1166913 -0.15485975 0.08293889 0.33446184 0.19545268 0.022261456 0.262053 -0.0670339 -0.023057189 0.15583932 0.37565848 -0.04788249 -0.10735807 -0.32669297 0.092570975 0.024945533\n",
      "-0.37056774 -0.015838865 -0.005468132 -0.099327646 -0.13694188 -0.022170871 0.07897448 0.092528574 -0.03274775 -0.109521136 -0.15372993 0.19865423 0.16175038 -0.26372987 0.13488837 -0.13089104 -0.16356374 -0.1051213 0.035303213 -0.1670486 0.114901476 0.11273137 0.0025249126 0.09908586 -0.21513698 0.15035631 0.0057970462 0.030838842 0.030590594 -0.3030325 -0.092682324 0.22843295 -0.1516976 -0.24431449 -0.361704 0.24607089 -0.10187059 0.058054015 -0.17793351 -0.11751725 0.030206975 0.16421165 -0.16412553 0.042895596 -0.035855435 0.25585586 -0.057255268 0.11286597 -0.13189071 -0.079435065 0.17383564 -0.3720071 -0.3628133 -0.22775996 -0.042709026 0.14506872 0.4223946 -0.0919503 0.27781585 -0.053883146 -0.022221714 -0.2531138 -0.04187209 -0.055738427 -0.098496266 0.0966713 0.016009249 0.29608792 -0.22697687 0.26721296 -0.0055097644 0.08764269 -0.15004188 -0.078892335 -0.0756594 -0.09011525 0.30488485 0.09349674 0.24826887 0.015844524 -0.07286472 -0.212891 -0.20803106 0.049022246 0.1727192 -0.14658111 0.17075126 0.19066484 0.16902746 0.15650558 0.28368604 -0.061403807 -0.068182506 0.15804403 0.3873795 -0.1283621 -0.08986702 -0.24194075 0.0039200243 0.054254845\n",
      "-0.36387855 0.017898181 -0.054076806 0.06810819 -0.068461016 -0.06770948 0.05948925 0.11789623 -0.027055345 -0.2481388 -0.1031251 0.23299484 0.10481228 -0.11438472 0.17077166 -0.19430399 -0.1620446 -0.10172909 0.1026837 -0.09407282 0.068752475 0.008937158 0.058782794 0.11011926 -0.1938815 0.21602838 -0.24592833 0.14011683 -0.07986635 -0.30094495 -0.03646756 0.16581675 -0.114093676 -0.26845866 -0.2601604 0.23039035 -0.045566257 0.07454873 -0.11822225 -0.07848218 0.054145157 0.18797116 -0.1704607 0.07119771 0.050979868 0.016820958 -0.053240415 0.05759592 -0.0798825 0.066014916 0.15619108 -0.3302744 -0.22181639 -0.358964 0.013667614 0.06859676 0.3662965 -0.23414905 0.28920594 0.040363837 -0.016046293 -0.29741833 -0.015225678 -0.03018974 -0.106398106 0.03545675 0.07690404 0.29554057 -0.23467158 0.2277314 -0.07209692 0.13525562 -0.17611562 -0.049016602 -0.08041619 -0.05054206 0.12506117 -0.02110704 0.29646227 0.1261153 -0.027875347 -0.24751799 -0.1834103 0.17474651 0.11547551 -0.16979952 0.032619335 0.3132924 0.18653859 0.096710466 0.22568339 -0.059704393 -0.10138475 0.14752908 0.29662853 0.09861597 -0.09440456 -0.28191444 -0.007741202 0.0029002272\n",
      "-0.040507775 -0.16184863 0.024923688 -0.0069875917 -0.1592891 -0.15378998 -0.07544721 0.36892727 0.04819577 0.06756448 -0.14626193 -0.2448657 -0.08169545 -0.19022925 0.42688078 -0.40232038 -0.24390003 -0.23428471 0.2553131 0.15644258 -0.15557833 -0.020853547 0.15805069 0.074619174 -0.457925 0.07344437 0.0081816055 0.35884786 0.1964542 -0.47133395 -0.14279766 0.44897264 -0.016726922 -0.43900746 -0.22073908 0.3337072 0.0492944 -0.16363795 -0.0964394 -0.24247232 0.4397729 0.40853512 0.043164037 -0.24844328 -0.17185952 -0.26970637 -0.17528176 0.31923795 -0.18100442 0.2727373 0.012639817 -0.3647238 -0.23669764 -0.48411402 0.013429949 0.27679646 0.28155053 -0.42444515 0.2872556 0.23166826 0.09775053 -0.14913562 -0.15558943 -0.27916113 -0.16932902 0.27217638 0.06707405 0.45440614 -0.22071305 0.120123774 -0.15370172 0.4769391 -0.23016441 0.15595192 -0.1741736 -0.060965683 0.2594444 -0.24369977 0.16145058 0.07029211 -0.08632262 -0.35913244 0.054990225 0.060827136 -0.01636269 -0.18481813 -0.15814555 0.38922065 -0.20025766 -0.06919067 0.14861663 0.30709356 0.10414347 0.09949865 0.27991185 0.46948767 -0.26305664 -0.365899 0.24022856 -0.1701348\n",
      "-0.33152184 -0.10051549 0.12877293 0.027777584 0.0042296266 0.03901994 -0.004208315 0.25793996 -0.07699941 -0.2636888 -0.15829775 0.12326466 0.297862 -0.14404427 0.18616599 -0.15592599 -0.092584975 -0.33501655 0.0046075787 0.01243748 0.16647437 0.20471787 0.07455572 -0.0026814612 -0.20925699 0.16169675 -0.11725949 0.11474634 -0.008378243 -0.3365296 -0.16806494 0.16922261 -0.20034264 -0.1205974 -0.30563188 0.17810932 -0.15591606 -0.08050221 -0.19770351 -0.072147585 0.03573191 0.12237963 0.008346779 0.09156214 0.15577766 0.16170691 0.16170192 0.030268352 -0.041348945 -0.07577536 0.31575656 -0.34312794 -0.25828665 -0.37857682 0.20024854 0.09802736 0.4094506 -0.17644586 0.26972133 0.1059221 -0.03467111 -0.29411024 -0.047319785 -0.07858044 -0.085708775 0.097169526 -0.052085925 0.21466719 -0.21983187 0.13880807 0.02859256 0.18611392 -0.06760405 -0.11377548 -0.030117633 -0.25221154 0.3289443 0.19387335 0.339845 0.20400617 -0.32383215 -0.14418173 -0.015121454 0.16711478 0.1642344 -0.0937915 0.0049881698 0.25772464 0.1196226 0.21484081 0.3820762 -0.14476366 0.074708536 -0.012448065 0.54852974 0.006913467 -0.13239886 -0.1661308 0.0015885399 0.0040383036\n",
      "-0.07578415 -0.0708557 -0.19902422 0.039915454 -0.3364294 -0.09448882 -0.08216308 0.30058527 0.017211929 -0.12985063 -0.05809866 -0.17114423 0.10150764 -0.3223101 0.4978572 -0.40483966 -0.21486808 -0.23179367 0.17535487 0.21857019 -0.12876117 -0.08358031 0.17631738 0.12957968 -0.46358642 0.09077366 -0.023133975 0.39962438 0.20751056 -0.31437197 -0.062764004 0.41851115 -0.12095487 -0.2966491 -0.103411056 0.1341059 0.008864066 -0.206229 -0.07938945 -0.07140623 0.31993836 0.4054097 -0.20978293 -0.29439878 -0.11234929 -0.28512338 -0.17522554 0.2734084 -0.24870822 0.346744 -0.047793385 -0.4511114 -0.2351898 -0.42803493 0.064090244 0.24181914 0.21957518 -0.419534 0.33041072 0.21374363 0.23666272 -0.1321843 -0.27235854 -0.26837528 -0.06206591 0.24290363 0.016884204 0.4695604 -0.2968223 0.12671918 -0.117082804 0.40807846 -0.2599302 0.35137463 -0.21640396 -0.12140493 0.204343 -0.21782055 0.15326099 0.104253426 -0.08200047 -0.3036152 0.03665658 0.10779636 -0.035846032 -0.14429536 -0.0552939 0.49910447 -0.21361572 -0.09797525 0.28304422 0.25601318 0.01701026 0.018946351 0.3411518 0.58900523 -0.40606403 -0.2999592 0.19532166 -0.1710856\n",
      "-0.19650559 -0.018341538 -0.03883235 -0.030843934 -0.107311554 0.075726464 0.04611784 0.14974242 0.03685027 -0.18002011 -0.11442638 0.15875351 0.045619287 -0.23143317 0.23921259 -0.05379493 -0.17408945 -0.2397486 0.02267251 -0.14131314 0.18422042 0.17680141 0.10228132 0.10339056 -0.25235978 0.12359247 -0.17533565 0.08515385 0.022958808 -0.3834242 -0.0037285667 0.3019823 0.017727263 -0.37860498 -0.24547002 0.16919301 -0.22398515 0.022105457 -0.17055818 0.02561382 -0.09937134 0.18240191 -0.22246304 0.08590509 0.117828205 0.28752446 0.075401306 -0.051088717 -0.21285716 -0.009648146 0.15482214 -0.46852607 -0.33557114 -0.29547238 0.05089262 -0.010777302 0.44869587 -0.24912556 0.2890473 0.07840594 0.05892288 -0.18927033 0.0509044 -0.13013944 0.04008597 0.067488015 -0.056325942 0.19674008 -0.21669008 0.25342998 0.0368896 0.22501081 -0.116510846 -0.025296966 -0.09180106 -0.15365393 0.09248273 -0.11601822 0.22196086 0.09304529 -0.14483139 -0.25008425 -0.08212788 0.1093876 0.19104798 -0.24513204 0.046358038 0.24231723 0.1134976 0.11135197 0.1128474 -0.077221 0.0030670636 0.078987844 0.3446046 0.07479904 0.032255027 -0.20563827 0.041131597 0.03751008\n",
      "-0.3554686 -0.04536847 0.044842727 -0.082410745 0.019018227 -0.027017262 -0.041269906 0.10888335 0.042650655 -0.2969567 -0.14944993 0.19588569 0.10425345 -0.1273838 0.27900168 -0.055131767 -0.008412691 -0.1246761 0.06866438 -0.27181232 0.20773076 0.15344854 0.12864102 0.14356901 -0.15111277 0.16819495 0.027498929 -0.024854535 -0.016266214 -0.39506307 -0.07201048 0.12126895 -0.21377337 -0.33389178 -0.37767556 0.25978515 -0.16912259 -0.046673916 -0.25620848 -0.13450146 0.019705582 0.096823245 -0.041977856 0.19523428 0.072363846 0.22906284 0.096771315 -0.06781308 -0.082354106 0.00430196 0.40228787 -0.28213826 -0.3650441 -0.20651011 0.15303819 0.13336402 0.40935805 -0.2866383 0.3329642 -0.07332561 0.059125744 -0.23471051 0.07449604 -0.1494852 -0.13703711 0.035428293 -0.15665226 0.40392405 -0.2557528 0.113761395 -0.08710207 0.12674919 -0.022202983 -0.39988804 -0.033700038 -0.15977016 0.4217803 0.1503927 0.22017586 0.055616144 -0.11439152 -0.237855 -0.20246358 0.15930717 0.03964664 -0.30668244 0.14946416 0.25402454 0.241294 0.12387335 0.20454067 -0.09450032 -0.12830184 0.20493431 0.4323685 -0.21000823 -0.15925404 -0.20929073 0.05361381 0.044181727\n",
      "-0.060703743 -0.10184445 -0.12712023 0.025241273 -0.2725509 -0.12957816 -0.11956124 0.26907966 0.06283496 -0.10401191 0.006490988 -0.12759486 0.030000035 -0.25974813 0.42281696 -0.3807484 -0.15386699 -0.19058736 0.16941223 0.20158009 -0.20487852 -0.09352891 0.13548602 0.056224767 -0.3991533 0.044708762 -0.027687252 0.42744228 0.20431082 -0.36010724 -0.10610298 0.5137547 -0.1073043 -0.34304985 -0.13149625 0.17538887 0.048158925 -0.20347477 -0.054513857 -0.1121201 0.36593807 0.45122483 -0.19286926 -0.29318902 -0.14590333 -0.32126394 -0.18767925 0.2715208 -0.21265803 0.33125857 -0.03550629 -0.46833915 -0.23274271 -0.42548555 0.034768905 0.23837234 0.28144264 -0.32405534 0.34702092 0.22240216 0.2280992 -0.1283274 -0.2464991 -0.317"
     ]
    },
    {
     "data": {
      "text/html": [
       "<b>limit_output extension: Maximum message size of 10000 exceeded with 1085373 characters</b>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'3750': [-0.28017086,\n",
       "  0.11350695,\n",
       "  -0.04262782,\n",
       "  -0.04602467,\n",
       "  -0.09189865,\n",
       "  0.05667208,\n",
       "  0.10785393,\n",
       "  0.13750735,\n",
       "  0.012261553,\n",
       "  -0.10165975,\n",
       "  -0.18224685,\n",
       "  0.23385282,\n",
       "  0.080661416,\n",
       "  -0.18840082,\n",
       "  0.15847741,\n",
       "  -0.21571872,\n",
       "  -0.17095001,\n",
       "  -0.21586788,\n",
       "  0.028861133,\n",
       "  -0.17697865,\n",
       "  0.022861684,\n",
       "  0.047177915,\n",
       "  0.05555938,\n",
       "  0.19529851,\n",
       "  -0.18746503,\n",
       "  0.10704636,\n",
       "  -0.06867341,\n",
       "  0.16720164,\n",
       "  0.021176228,\n",
       "  -0.3204781,\n",
       "  -0.008164253,\n",
       "  0.18224248,\n",
       "  -0.11043159,\n",
       "  -0.23564485,\n",
       "  -0.316806,\n",
       "  0.15005909,\n",
       "  -0.21871711,\n",
       "  0.072890386,\n",
       "  -0.11560779,\n",
       "  -0.0733482,\n",
       "  0.004734337,\n",
       "  0.1710536,\n",
       "  -0.13737045,\n",
       "  0.039193146,\n",
       "  0.086451344,\n",
       "  0.12931097,\n",
       "  -0.086180866,\n",
       "  0.06970388,\n",
       "  -0.07609641,\n",
       "  0.056510027,\n",
       "  0.17541671,\n",
       "  -0.38054082,\n",
       "  -0.34623057,\n",
       "  -0.3520798,\n",
       "  0.006494347,\n",
       "  0.11706478,\n",
       "  0.3999753,\n",
       "  -0.12466619,\n",
       "  0.30152428,\n",
       "  0.06916078,\n",
       "  -0.007933292,\n",
       "  -0.20891497,\n",
       "  0.029494235,\n",
       "  -0.028727993,\n",
       "  -0.045374274,\n",
       "  0.07576568,\n",
       "  0.02458046,\n",
       "  0.2593493,\n",
       "  -0.2884896,\n",
       "  0.1259758,\n",
       "  -0.059084617,\n",
       "  0.13245226,\n",
       "  -0.18634497,\n",
       "  -0.09137347,\n",
       "  -0.094235785,\n",
       "  -0.035731245,\n",
       "  0.17370886,\n",
       "  0.05358243,\n",
       "  0.24141954,\n",
       "  0.13658875,\n",
       "  -0.14156654,\n",
       "  -0.2423847,\n",
       "  -0.2015774,\n",
       "  0.060784955,\n",
       "  0.1166913,\n",
       "  -0.15485975,\n",
       "  0.08293889,\n",
       "  0.33446184,\n",
       "  0.19545268,\n",
       "  0.022261456,\n",
       "  0.262053,\n",
       "  -0.0670339,\n",
       "  -0.023057189,\n",
       "  0.15583932,\n",
       "  0.37565848,\n",
       "  -0.04788249,\n",
       "  -0.10735807,\n",
       "  -0.32669297,\n",
       "  0.092570975,\n",
       "  0.024945533],\n",
       " '648': [-0.37056774,\n",
       "  -0.015838865,\n",
       "  -0.005468132,\n",
       "  -0.099327646,\n",
       "  -0.13694188,\n",
       "  -0.022170871,\n",
       "  0.07897448,\n",
       "  0.092528574,\n",
       "  -0.03274775,\n",
       "  -0.109521136,\n",
       "  -0.15372993,\n",
       "  0.19865423,\n",
       "  0.16175038,\n",
       "  -0.26372987,\n",
       "  0.13488837,\n",
       "  -0.13089104,\n",
       "  -0.16356374,\n",
       "  -0.1051213,\n",
       "  0.035303213,\n",
       "  -0.1670486,\n",
       "  0.114901476,\n",
       "  0.11273137,\n",
       "  0.0025249126,\n",
       "  0.09908586,\n",
       "  -0.21513698,\n",
       "  0.15035631,\n",
       "  0.0057970462,\n",
       "  0.030838842,\n",
       "  0.030590594,\n",
       "  -0.3030325,\n",
       "  -0.092682324,\n",
       "  0.22843295,\n",
       "  -0.1516976,\n",
       "  -0.24431449,\n",
       "  -0.361704,\n",
       "  0.24607089,\n",
       "  -0.10187059,\n",
       "  0.058054015,\n",
       "  -0.17793351,\n",
       "  -0.11751725,\n",
       "  0.030206975,\n",
       "  0.16421165,\n",
       "  -0.16412553,\n",
       "  0.042895596,\n",
       "  -0.035855435,\n",
       "  0.25585586,\n",
       "  -0.057255268,\n",
       "  0.11286597,\n",
       "  -0.13189071,\n",
       "  -0.079435065,\n",
       "  0.17383564,\n",
       "  -0.3720071,\n",
       "  -0.3628133,\n",
       "  -0.22775996,\n",
       "  -0.042709026,\n",
       "  0.14506872,\n",
       "  0.4223946,\n",
       "  -0.0919503,\n",
       "  0.27781585,\n",
       "  -0.053883146,\n",
       "  -0.022221714,\n",
       "  -0.2531138,\n",
       "  -0.04187209,\n",
       "  -0.055738427,\n",
       "  -0.098496266,\n",
       "  0.0966713,\n",
       "  0.016009249,\n",
       "  0.29608792,\n",
       "  -0.22697687,\n",
       "  0.26721296,\n",
       "  -0.0055097644,\n",
       "  0.08764269,\n",
       "  -0.15004188,\n",
       "  -0.078892335,\n",
       "  -0.0756594,\n",
       "  -0.09011525,\n",
       "  0.30488485,\n",
       "  0.09349674,\n",
       "  0.24826887,\n",
       "  0.015844524,\n",
       "  -0.07286472,\n",
       "  -0.212891,\n",
       "  -0.20803106,\n",
       "  0.049022246,\n",
       "  0.1727192,\n",
       "  -0.14658111,\n",
       "  0.17075126,\n",
       "  0.19066484,\n",
       "  0.16902746,\n",
       "  0.15650558,\n",
       "  0.28368604,\n",
       "  -0.061403807,\n",
       "  -0.068182506,\n",
       "  0.15804403,\n",
       "  0.3873795,\n",
       "  -0.1283621,\n",
       "  -0.08986702,\n",
       "  -0.24194075,\n",
       "  0.0039200243,\n",
       "  0.054254845],\n",
       " '900': [-0.36387855,\n",
       "  0.017898181,\n",
       "  -0.054076806,\n",
       "  0.06810819,\n",
       "  -0.068461016,\n",
       "  -0.06770948,\n",
       "  0.05948925,\n",
       "  0.11789623,\n",
       "  -0.027055345,\n",
       "  -0.2481388,\n",
       "  -0.1031251,\n",
       "  0.23299484,\n",
       "  0.10481228,\n",
       "  -0.11438472,\n",
       "  0.17077166,\n",
       "  -0.19430399,\n",
       "  -0.1620446,\n",
       "  -0.10172909,\n",
       "  0.1026837,\n",
       "  -0.09407282,\n",
       "  0.068752475,\n",
       "  0.008937158,\n",
       "  0.058782794,\n",
       "  0.11011926,\n",
       "  -0.1938815,\n",
       "  0.21602838,\n",
       "  -0.24592833,\n",
       "  0.14011683,\n",
       "  -0.07986635,\n",
       "  -0.30094495,\n",
       "  -0.03646756,\n",
       "  0.16581675,\n",
       "  -0.114093676,\n",
       "  -0.26845866,\n",
       "  -0.2601604,\n",
       "  0.23039035,\n",
       "  -0.045566257,\n",
       "  0.07454873,\n",
       "  -0.11822225,\n",
       "  -0.07848218,\n",
       "  0.054145157,\n",
       "  0.18797116,\n",
       "  -0.1704607,\n",
       "  0.07119771,\n",
       "  0.050979868,\n",
       "  0.016820958,\n",
       "  -0.053240415,\n",
       "  0.05759592,\n",
       "  -0.0798825,\n",
       "  0.066014916,\n",
       "  0.15619108,\n",
       "  -0.3302744,\n",
       "  -0.22181639,\n",
       "  -0.358964,\n",
       "  0.013667614,\n",
       "  0.06859676,\n",
       "  0.3662965,\n",
       "  -0.23414905,\n",
       "  0.28920594,\n",
       "  0.040363837,\n",
       "  -0.016046293,\n",
       "  -0.29741833,\n",
       "  -0.015225678,\n",
       "  -0.03018974,\n",
       "  -0.106398106,\n",
       "  0.03545675,\n",
       "  0.07690404,\n",
       "  0.29554057,\n",
       "  -0.23467158,\n",
       "  0.2277314,\n",
       "  -0.07209692,\n",
       "  0.13525562,\n",
       "  -0.17611562,\n",
       "  -0.049016602,\n",
       "  -0.08041619,\n",
       "  -0.05054206,\n",
       "  0.12506117,\n",
       "  -0.02110704,\n",
       "  0.29646227,\n",
       "  0.1261153,\n",
       "  -0.027875347,\n",
       "  -0.24751799,\n",
       "  -0.1834103,\n",
       "  0.17474651,\n",
       "  0.11547551,\n",
       "  -0.16979952,\n",
       "  0.032619335,\n",
       "  0.3132924,\n",
       "  0.18653859,\n",
       "  0.096710466,\n",
       "  0.22568339,\n",
       "  -0.059704393,\n",
       "  -0.10138475,\n",
       "  0.14752908,\n",
       "  0.29662853,\n",
       "  0.09861597,\n",
       "  -0.09440456,\n",
       "  -0.28191444,\n",
       "  -0.007741202,\n",
       "  0.0029002272],\n",
       " '3370': [-0.040507775,\n",
       "  -0.16184863,\n",
       "  0.024923688,\n",
       "  -0.0069875917,\n",
       "  -0.1592891,\n",
       "  -0.15378998,\n",
       "  -0.07544721,\n",
       "  0.36892727,\n",
       "  0.04819577,\n",
       "  0.06756448,\n",
       "  -0.14626193,\n",
       "  -0.2448657,\n",
       "  -0.08169545,\n",
       "  -0.19022925,\n",
       "  0.42688078,\n",
       "  -0.40232038,\n",
       "  -0.24390003,\n",
       "  -0.23428471,\n",
       "  0.2553131,\n",
       "  0.15644258,\n",
       "  -0.15557833,\n",
       "  -0.020853547,\n",
       "  0.15805069,\n",
       "  0.074619174,\n",
       "  -0.457925,\n",
       "  0.07344437,\n",
       "  0.0081816055,\n",
       "  0.35884786,\n",
       "  0.1964542,\n",
       "  -0.47133395,\n",
       "  -0.14279766,\n",
       "  0.44897264,\n",
       "  -0.016726922,\n",
       "  -0.43900746,\n",
       "  -0.22073908,\n",
       "  0.3337072,\n",
       "  0.0492944,\n",
       "  -0.16363795,\n",
       "  -0.0964394,\n",
       "  -0.24247232,\n",
       "  0.4397729,\n",
       "  0.40853512,\n",
       "  0.043164037,\n",
       "  -0.24844328,\n",
       "  -0.17185952,\n",
       "  -0.26970637,\n",
       "  -0.17528176,\n",
       "  0.31923795,\n",
       "  -0.18100442,\n",
       "  0.2727373,\n",
       "  0.012639817,\n",
       "  -0.3647238,\n",
       "  -0.23669764,\n",
       "  -0.48411402,\n",
       "  0.013429949,\n",
       "  0.27679646,\n",
       "  0.28155053,\n",
       "  -0.42444515,\n",
       "  0.2872556,\n",
       "  0.23166826,\n",
       "  0.09775053,\n",
       "  -0.14913562,\n",
       "  -0.15558943,\n",
       "  -0.27916113,\n",
       "  -0.16932902,\n",
       "  0.27217638,\n",
       "  0.06707405,\n",
       "  0.45440614,\n",
       "  -0.22071305,\n",
       "  0.120123774,\n",
       "  -0.15370172,\n",
       "  0.4769391,\n",
       "  -0.23016441,\n",
       "  0.15595192,\n",
       "  -0.1741736,\n",
       "  -0.060965683,\n",
       "  0.2594444,\n",
       "  -0.24369977,\n",
       "  0.16145058,\n",
       "  0.07029211,\n",
       "  -0.08632262,\n",
       "  -0.35913244,\n",
       "  0.054990225,\n",
       "  0.060827136,\n",
       "  -0.01636269,\n",
       "  -0.18481813,\n",
       "  -0.15814555,\n",
       "  0.38922065,\n",
       "  -0.20025766,\n",
       "  -0.06919067,\n",
       "  0.14861663,\n",
       "  0.30709356,\n",
       "  0.10414347,\n",
       "  0.09949865,\n",
       "  0.27991185,\n",
       "  0.46948767,\n",
       "  -0.26305664,\n",
       "  -0.365899,\n",
       "  0.24022856,\n",
       "  -0.1701348],\n",
       " '6122': [-0.33152184,\n",
       "  -0.10051549,\n",
       "  0.12877293,\n",
       "  0.027777584,\n",
       "  0.0042296266,\n",
       "  0.03901994,\n",
       "  -0.004208315,\n",
       "  0.25793996,\n",
       "  -0.07699941,\n",
       "  -0.2636888,\n",
       "  -0.15829775,\n",
       "  0.12326466,\n",
       "  0.297862,\n",
       "  -0.14404427,\n",
       "  0.18616599,\n",
       "  -0.15592599,\n",
       "  -0.092584975,\n",
       "  -0.33501655,\n",
       "  0.0046075787,\n",
       "  0.01243748,\n",
       "  0.16647437,\n",
       "  0.20471787,\n",
       "  0.07455572,\n",
       "  -0.0026814612,\n",
       "  -0.20925699,\n",
       "  0.16169675,\n",
       "  -0.11725949,\n",
       "  0.11474634,\n",
       "  -0.008378243,\n",
       "  -0.3365296,\n",
       "  -0.16806494,\n",
       "  0.16922261,\n",
       "  -0.20034264,\n",
       "  -0.1205974,\n",
       "  -0.30563188,\n",
       "  0.17810932,\n",
       "  -0.15591606,\n",
       "  -0.08050221,\n",
       "  -0.19770351,\n",
       "  -0.072147585,\n",
       "  0.03573191,\n",
       "  0.12237963,\n",
       "  0.008346779,\n",
       "  0.09156214,\n",
       "  0.15577766,\n",
       "  0.16170691,\n",
       "  0.16170192,\n",
       "  0.030268352,\n",
       "  -0.041348945,\n",
       "  -0.07577536,\n",
       "  0.31575656,\n",
       "  -0.34312794,\n",
       "  -0.25828665,\n",
       "  -0.37857682,\n",
       "  0.20024854,\n",
       "  0.09802736,\n",
       "  0.4094506,\n",
       "  -0.17644586,\n",
       "  0.26972133,\n",
       "  0.1059221,\n",
       "  -0.03467111,\n",
       "  -0.29411024,\n",
       "  -0.047319785,\n",
       "  -0.07858044,\n",
       "  -0.085708775,\n",
       "  0.097169526,\n",
       "  -0.052085925,\n",
       "  0.21466719,\n",
       "  -0.21983187,\n",
       "  0.13880807,\n",
       "  0.02859256,\n",
       "  0.18611392,\n",
       "  -0.06760405,\n",
       "  -0.11377548,\n",
       "  -0.030117633,\n",
       "  -0.25221154,\n",
       "  0.3289443,\n",
       "  0.19387335,\n",
       "  0.339845,\n",
       "  0.20400617,\n",
       "  -0.32383215,\n",
       "  -0.14418173,\n",
       "  -0.015121454,\n",
       "  0.16711478,\n",
       "  0.1642344,\n",
       "  -0.0937915,\n",
       "  0.0049881698,\n",
       "  0.25772464,\n",
       "  0.1196226,\n",
       "  0.21484081,\n",
       "  0.3820762,\n",
       "  -0.14476366,\n",
       "  0.074708536,\n",
       "  -0.012448065,\n",
       "  0.54852974,\n",
       "  0.006913467,\n",
       "  -0.13239886,\n",
       "  -0.1661308,\n",
       "  0.0015885399,\n",
       "  0.0040383036],\n",
       " '4464': [-0.07578415,\n",
       "  -0.0708557,\n",
       "  -0.19902422,\n",
       "  0.039915454,\n",
       "  -0.3364294,\n",
       "  -0.09448882,\n",
       "  -0.08216308,\n",
       "  0.30058527,\n",
       "  0.017211929,\n",
       "  -0.12985063,\n",
       "  -0.05809866,\n",
       "  -0.17114423,\n",
       "  0.10150764,\n",
       "  -0.3223101,\n",
       "  0.4978572,\n",
       "  -0.40483966,\n",
       "  -0.21486808,\n",
       "  -0.23179367,\n",
       "  0.17535487,\n",
       "  0.21857019,\n",
       "  -0.12876117,\n",
       "  -0.08358031,\n",
       "  0.17631738,\n",
       "  0.12957968,\n",
       "  -0.46358642,\n",
       "  0.09077366,\n",
       "  -0.023133975,\n",
       "  0.39962438,\n",
       "  0.20751056,\n",
       "  -0.31437197,\n",
       "  -0.062764004,\n",
       "  0.41851115,\n",
       "  -0.12095487,\n",
       "  -0.2966491,\n",
       "  -0.103411056,\n",
       "  0.1341059,\n",
       "  0.008864066,\n",
       "  -0.206229,\n",
       "  -0.07938945,\n",
       "  -0.07140623,\n",
       "  0.31993836,\n",
       "  0.4054097,\n",
       "  -0.20978293,\n",
       "  -0.29439878,\n",
       "  -0.11234929,\n",
       "  -0.28512338,\n",
       "  -0.17522554,\n",
       "  0.2734084,\n",
       "  -0.24870822,\n",
       "  0.346744,\n",
       "  -0.047793385,\n",
       "  -0.4511114,\n",
       "  -0.2351898,\n",
       "  -0.42803493,\n",
       "  0.064090244,\n",
       "  0.24181914,\n",
       "  0.21957518,\n",
       "  -0.419534,\n",
       "  0.33041072,\n",
       "  0.21374363,\n",
       "  0.23666272,\n",
       "  -0.1321843,\n",
       "  -0.27235854,\n",
       "  -0.26837528,\n",
       "  -0.06206591,\n",
       "  0.24290363,\n",
       "  0.016884204,\n",
       "  0.4695604,\n",
       "  -0.2968223,\n",
       "  0.12671918,\n",
       "  -0.117082804,\n",
       "  0.40807846,\n",
       "  -0.2599302,\n",
       "  0.35137463,\n",
       "  -0.21640396,\n",
       "  -0.12140493,\n",
       "  0.204343,\n",
       "  -0.21782055,\n",
       "  0.15326099,\n",
       "  0.104253426,\n",
       "  -0.08200047,\n",
       "  -0.3036152,\n",
       "  0.03665658,\n",
       "  0.10779636,\n",
       "  -0.035846032,\n",
       "  -0.14429536,\n",
       "  -0.0552939,\n",
       "  0.49910447,\n",
       "  -0.21361572,\n",
       "  -0.09797525,\n",
       "  0.28304422,\n",
       "  0.25601318,\n",
       "  0.01701026,\n",
       "  0.018946351,\n",
       "  0.3411518,\n",
       "  0.58900523,\n",
       "  -0.40606403,\n",
       "  -0.2999592,\n",
       "  0.19532166,\n",
       "  -0.1710856],\n",
       " '7399': [-0.19650559,\n",
       "  -0.018341538,\n",
       "  -0.03883235,\n",
       "  -0.030843934,\n",
       "  -0.107311554,\n",
       "  0.075726464,\n",
       "  0.04611784,\n",
       "  0.14974242,\n",
       "  0.03685027,\n",
       "  -0.18002011,\n",
       "  -0.11442638,\n",
       "  0.15875351,\n",
       "  0.045619287,\n",
       "  -0.23143317,\n",
       "  0.23921259,\n",
       "  -0.05379493,\n",
       "  -0.17408945,\n",
       "  -0.2397486,\n",
       "  0.02267251,\n",
       "  -0.14131314,\n",
       "  0.18422042,\n",
       "  0.17680141,\n",
       "  0.10228132,\n",
       "  0.10339056,\n",
       "  -0.25235978,\n",
       "  0.12359247,\n",
       "  -0.17533565,\n",
       "  0.08515385,\n",
       "  0.022958808,\n",
       "  -0.3834242,\n",
       "  -0.0037285667,\n",
       "  0.3019823,\n",
       "  0.017727263,\n",
       "  -0.37860498,\n",
       "  -0.24547002,\n",
       "  0.16919301,\n",
       "  -0.22398515,\n",
       "  0.022105457,\n",
       "  -0.17055818,\n",
       "  0.02561382,\n",
       "  -0.09937134,\n",
       "  0.18240191,\n",
       "  -0.22246304,\n",
       "  0.08590509,\n",
       "  0.117828205,\n",
       "  0.28752446,\n",
       "  0.075401306,\n",
       "  -0.051088717,\n",
       "  -0.21285716,\n",
       "  -0.009648146,\n",
       "  0.15482214,\n",
       "  -0.46852607,\n",
       "  -0.33557114,\n",
       "  -0.29547238,\n",
       "  0.05089262,\n",
       "  -0.010777302,\n",
       "  0.44869587,\n",
       "  -0.24912556,\n",
       "  0.2890473,\n",
       "  0.07840594,\n",
       "  0.05892288,\n",
       "  -0.18927033,\n",
       "  0.0509044,\n",
       "  -0.13013944,\n",
       "  0.04008597,\n",
       "  0.067488015,\n",
       "  -0.056325942,\n",
       "  0.19674008,\n",
       "  -0.21669008,\n",
       "  0.25342998,\n",
       "  0.0368896,\n",
       "  0.22501081,\n",
       "  -0.116510846,\n",
       "  -0.025296966,\n",
       "  -0.09180106,\n",
       "  -0.15365393,\n",
       "  0.09248273,\n",
       "  -0.11601822,\n",
       "  0.22196086,\n",
       "  0.09304529,\n",
       "  -0.14483139,\n",
       "  -0.25"
      ]
     },
     "execution_count": 44,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "text/html": [
       "<b>limit_output extension: Maximum message size of 10000 exceeded with 1459707 characters</b>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "## [利用 gensim 获取 fasttext 词向量](https://radimrehurek.com/gensim/models/fasttext.html)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "from gensim.models import FastText\n",
    "from gensim.test.utils import get_tmpfile\n",
    "from gensim.test.utils import datapath\n",
    "from gensim import utils\n",
    "from gensim.utils import tokenize"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [],
   "source": [
    "class MyIter(object):\n",
    "    def __iter__(self):\n",
    "        path = datapath('D:/project/python_wp/nlp/TextClassifier/data/corpus.csv')\n",
    "        with utils.open(path, 'r', encoding='utf-8') as fin:\n",
    "            for line in fin:\n",
    "                yield line.split(\" \")\n",
    "\n",
    "model = FastText(size=4, window=3, min_count=1)\n",
    "model.build_vocab(sentences=MyIter())\n",
    "total_examples = model.corpus_count\n",
    "model.train(sentences=MyIter(), total_examples=total_examples, epochs=5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "from gensim.test.utils import get_tmpfile\n",
    "fname = get_tmpfile('D:/project/python_wp/nlp/TextClassifier/data/skipgram_model_gensim.bin')\n",
    "model.save(fname)\n",
    "model = FastText.load(fname)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "True"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "'4080' in model.wv.vocab"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([-0.9314406,  2.5237556, -3.3736963,  2.034601 ], dtype=float32)"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "model.wv['4080']"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "hide_input": false,
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.0"
  },
  "toc": {
   "base_numbering": 1,
   "nav_menu": {},
   "number_sections": true,
   "sideBar": true,
   "skip_h1_title": false,
   "title_cell": "Table of Contents",
   "title_sidebar": "Contents",
   "toc_cell": false,
   "toc_position": {},
   "toc_section_display": true,
   "toc_window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
