{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\zake7\\Anaconda3\\lib\\site-packages\\gensim\\utils.py:1197: UserWarning: detected Windows; aliasing chunkize to chunkize_serial\n",
      "  warnings.warn(\"detected Windows; aliasing chunkize to chunkize_serial\")\n"
     ]
    }
   ],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "import gensim"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Load training set and testing set"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the train/test CSVs; each row carries a pair of Chinese news\n",
    "# titles (title1_zh / title2_zh) used below to build the embedding corpus.\n",
    "# Paths are relative to the notebook's directory.\n",
    "train_df = pd.read_csv(\"../data/dataset/train.csv\")\n",
    "test_df = pd.read_csv(\"../data/dataset/test.csv\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Unique title strings from both title columns of the training set.\n",
    "# isinstance (rather than type(v) == str) is the idiomatic check; it also\n",
    "# filters out non-string entries (e.g. NaN from missing titles).\n",
    "train_corpus = np.unique([v for v in np.concatenate([train_df.title1_zh.unique(), train_df.title2_zh.unique()]) if isinstance(v, str)])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Same extraction for the test set: unique title strings, with\n",
    "# non-string entries (e.g. NaN) filtered via the idiomatic isinstance check.\n",
    "test_corpus = np.unique([v for v in np.concatenate([test_df.title1_zh.unique(), test_df.title2_zh.unique()]) if isinstance(v, str)])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Combined corpus used for embedding training. Note: each half was\n",
    "# deduplicated independently, so a title present in both train and test\n",
    "# appears twice here (no cross-set deduplication).\n",
    "all_corpus = np.concatenate([train_corpus, test_corpus])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Write one title per line, space-separated at the character level, so a\n",
    "# whitespace-splitting reader (gensim LineSentence below) treats each\n",
    "# Chinese character as a token.\n",
    "with open('../data/corpus.txt', 'w', encoding='utf-8') as corpus:\n",
    "    for sentence in all_corpus:\n",
    "        # ' '.join does one write per line instead of one per character,\n",
    "        # and drops the stray trailing space the old loop left before '\\n'\n",
    "        # (harmless either way for whitespace-based splitting).\n",
    "        corpus.write(' '.join(sentence) + '\\n')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Word2Vec"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:52:04,553 : WARNING : consider setting layer size to a multiple of 4 for greater performance\n",
      "2018-12-26 20:52:04,554 : INFO : collecting all words and their counts\n",
      "2018-12-26 20:52:04,554 : INFO : PROGRESS: at sentence #0, processed 0 words, keeping 0 word types\n",
      "2018-12-26 20:52:04,603 : INFO : PROGRESS: at sentence #10000, processed 268913 words, keeping 3122 word types\n",
      "2018-12-26 20:52:04,651 : INFO : PROGRESS: at sentence #20000, processed 527375 words, keeping 3764 word types\n",
      "2018-12-26 20:52:04,697 : INFO : PROGRESS: at sentence #30000, processed 776732 words, keeping 4033 word types\n",
      "2018-12-26 20:52:04,744 : INFO : PROGRESS: at sentence #40000, processed 1026248 words, keeping 4173 word types\n",
      "2018-12-26 20:52:04,792 : INFO : PROGRESS: at sentence #50000, processed 1274561 words, keeping 4291 word types\n",
      "2018-12-26 20:52:04,837 : INFO : PROGRESS: at sentence #60000, processed 1509703 words, keeping 4441 word types\n",
      "2018-12-26 20:52:04,884 : INFO : PROGRESS: at sentence #70000, processed 1758393 words, keeping 4532 word types\n",
      "2018-12-26 20:52:04,930 : INFO : PROGRESS: at sentence #80000, processed 2002019 words, keeping 4639 word types\n",
      "2018-12-26 20:52:04,976 : INFO : PROGRESS: at sentence #90000, processed 2243756 words, keeping 4717 word types\n",
      "2018-12-26 20:52:05,023 : INFO : PROGRESS: at sentence #100000, processed 2490952 words, keeping 4807 word types\n",
      "2018-12-26 20:52:05,068 : INFO : PROGRESS: at sentence #110000, processed 2731991 words, keeping 4863 word types\n",
      "2018-12-26 20:52:05,114 : INFO : PROGRESS: at sentence #120000, processed 2977027 words, keeping 4912 word types\n",
      "2018-12-26 20:52:05,161 : INFO : PROGRESS: at sentence #130000, processed 3219066 words, keeping 4977 word types\n",
      "2018-12-26 20:52:05,221 : INFO : PROGRESS: at sentence #140000, processed 3467501 words, keeping 5026 word types\n",
      "2018-12-26 20:52:05,267 : INFO : PROGRESS: at sentence #150000, processed 3711862 words, keeping 5080 word types\n",
      "2018-12-26 20:52:05,313 : INFO : PROGRESS: at sentence #160000, processed 3957823 words, keeping 5139 word types\n",
      "2018-12-26 20:52:05,360 : INFO : PROGRESS: at sentence #170000, processed 4212294 words, keeping 5194 word types\n",
      "2018-12-26 20:52:05,406 : INFO : PROGRESS: at sentence #180000, processed 4462312 words, keeping 5199 word types\n",
      "2018-12-26 20:52:05,453 : INFO : PROGRESS: at sentence #190000, processed 4708050 words, keeping 5207 word types\n",
      "2018-12-26 20:52:05,500 : INFO : PROGRESS: at sentence #200000, processed 4954533 words, keeping 5217 word types\n",
      "2018-12-26 20:52:05,546 : INFO : PROGRESS: at sentence #210000, processed 5197138 words, keeping 5226 word types\n",
      "2018-12-26 20:52:05,593 : INFO : PROGRESS: at sentence #220000, processed 5440500 words, keeping 5236 word types\n",
      "2018-12-26 20:52:05,639 : INFO : PROGRESS: at sentence #230000, processed 5692502 words, keeping 5250 word types\n",
      "2018-12-26 20:52:05,642 : INFO : collected 5250 word types from a corpus of 5701379 raw words and 230368 sentences\n",
      "2018-12-26 20:52:05,642 : INFO : Loading a fresh vocabulary\n",
      "2018-12-26 20:52:05,647 : INFO : min_count=5 retains 4113 unique words (78% of original 5250, drops 1137)\n",
      "2018-12-26 20:52:05,648 : INFO : min_count=5 leaves 5698971 word corpus (99% of original 5701379, drops 2408)\n",
      "2018-12-26 20:52:05,656 : INFO : deleting the raw counts dictionary of 5250 items\n",
      "2018-12-26 20:52:05,657 : INFO : sample=0.001 downsamples 51 most-common words\n",
      "2018-12-26 20:52:05,658 : INFO : downsampling leaves estimated 4976464 word corpus (87.3% of prior 5698971)\n",
      "2018-12-26 20:52:05,664 : INFO : estimated required memory for 4113 words and 50 dimensions: 3701700 bytes\n",
      "2018-12-26 20:52:05,665 : INFO : resetting layer weights\n",
      "2018-12-26 20:52:05,699 : INFO : training model with 3 workers on 4113 vocabulary and 50 features, using sg=0 hs=0 sample=0.001 negative=5 window=5\n",
      "2018-12-26 20:52:06,704 : INFO : EPOCH 1 - PROGRESS: at 51.91% examples, 2585118 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:07,589 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:07,594 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:07,595 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:07,596 : INFO : EPOCH - 1 : training on 5701379 raw words (4975766 effective words) took 1.9s, 2625653 effective words/s\n",
      "2018-12-26 20:52:08,598 : INFO : EPOCH 2 - PROGRESS: at 52.77% examples, 2632769 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:09,484 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:09,486 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:09,489 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:09,489 : INFO : EPOCH - 2 : training on 5701379 raw words (4976680 effective words) took 1.9s, 2630273 effective words/s\n",
      "2018-12-26 20:52:10,492 : INFO : EPOCH 3 - PROGRESS: at 53.13% examples, 2649721 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:11,354 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:11,357 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:11,360 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:11,360 : INFO : EPOCH - 3 : training on 5701379 raw words (4976435 effective words) took 1.9s, 2661618 effective words/s\n",
      "2018-12-26 20:52:12,364 : INFO : EPOCH 4 - PROGRESS: at 53.13% examples, 2648344 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:13,224 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:13,227 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:13,229 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:13,230 : INFO : EPOCH - 4 : training on 5701379 raw words (4977555 effective words) took 1.9s, 2664742 effective words/s\n",
      "2018-12-26 20:52:14,234 : INFO : EPOCH 5 - PROGRESS: at 52.25% examples, 2601682 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:15,128 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:15,133 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:15,134 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:15,135 : INFO : EPOCH - 5 : training on 5701379 raw words (4976123 effective words) took 1.9s, 2612707 effective words/s\n",
      "2018-12-26 20:52:15,135 : INFO : training on a 28506895 raw words (24882559 effective words) took 9.4s, 2636916 effective words/s\n",
      "2018-12-26 20:52:15,136 : INFO : saving Word2Vec object under word2vec.model, separately None\n",
      "2018-12-26 20:52:15,136 : INFO : not storing attribute vectors_norm\n",
      "2018-12-26 20:52:15,137 : INFO : not storing attribute cum_table\n",
      "2018-12-26 20:52:15,159 : INFO : saved word2vec.model\n"
     ]
    }
   ],
   "source": [
    "import logging\n",
    "from gensim.models import word2vec\n",
    "# Surface gensim's vocabulary/epoch progress logs (stderr output above).\n",
    "logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)\n",
    "# Stream the character-segmented corpus; each line is one sentence.\n",
    "sentences = word2vec.LineSentence('../data/corpus.txt')\n",
    "# CBOW (sg=0) with negative sampling (hs=0), 50-dim vectors, window 5,\n",
    "# dropping characters seen fewer than 5 times. `size=` is the gensim<4\n",
    "# keyword (renamed to vector_size in gensim 4).\n",
    "model = word2vec.Word2Vec(sentences, sg=0, hs=0, window=5, size=50, min_count=5)\n",
    "# NOTE(review): saved to the notebook's cwd, unlike the ../data/ paths\n",
    "# used everywhere else -- confirm this location is intended.\n",
    "model.save(\"word2vec.model\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:52:15,163 : INFO : storing 4113x50 projection weights into ../data/wordvec/zh-wordvec-50-cbow-windowsize50.vec\n"
     ]
    }
   ],
   "source": [
    "# Export the trained vectors in plain-text word2vec format.\n",
    "# NOTE(review): the filename says 'windowsize50' but the model above was\n",
    "# trained with window=5 -- likely a typo in the name. Not renamed here\n",
    "# because downstream code may already load this exact path; confirm first.\n",
    "model.wv.save_word2vec_format('../data/wordvec/zh-wordvec-50-cbow-windowsize50.vec', binary=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Skipgram"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:52:15,266 : WARNING : consider setting layer size to a multiple of 4 for greater performance\n",
      "2018-12-26 20:52:15,267 : INFO : collecting all words and their counts\n",
      "2018-12-26 20:52:15,267 : INFO : PROGRESS: at sentence #0, processed 0 words, keeping 0 word types\n",
      "2018-12-26 20:52:15,316 : INFO : PROGRESS: at sentence #10000, processed 268913 words, keeping 3122 word types\n",
      "2018-12-26 20:52:15,364 : INFO : PROGRESS: at sentence #20000, processed 527375 words, keeping 3764 word types\n",
      "2018-12-26 20:52:15,412 : INFO : PROGRESS: at sentence #30000, processed 776732 words, keeping 4033 word types\n",
      "2018-12-26 20:52:15,459 : INFO : PROGRESS: at sentence #40000, processed 1026248 words, keeping 4173 word types\n",
      "2018-12-26 20:52:15,505 : INFO : PROGRESS: at sentence #50000, processed 1274561 words, keeping 4291 word types\n",
      "2018-12-26 20:52:15,551 : INFO : PROGRESS: at sentence #60000, processed 1509703 words, keeping 4441 word types\n",
      "2018-12-26 20:52:15,598 : INFO : PROGRESS: at sentence #70000, processed 1758393 words, keeping 4532 word types\n",
      "2018-12-26 20:52:15,644 : INFO : PROGRESS: at sentence #80000, processed 2002019 words, keeping 4639 word types\n",
      "2018-12-26 20:52:15,691 : INFO : PROGRESS: at sentence #90000, processed 2243756 words, keeping 4717 word types\n",
      "2018-12-26 20:52:15,738 : INFO : PROGRESS: at sentence #100000, processed 2490952 words, keeping 4807 word types\n",
      "2018-12-26 20:52:15,784 : INFO : PROGRESS: at sentence #110000, processed 2731991 words, keeping 4863 word types\n",
      "2018-12-26 20:52:15,831 : INFO : PROGRESS: at sentence #120000, processed 2977027 words, keeping 4912 word types\n",
      "2018-12-26 20:52:15,877 : INFO : PROGRESS: at sentence #130000, processed 3219066 words, keeping 4977 word types\n",
      "2018-12-26 20:52:15,925 : INFO : PROGRESS: at sentence #140000, processed 3467501 words, keeping 5026 word types\n",
      "2018-12-26 20:52:15,972 : INFO : PROGRESS: at sentence #150000, processed 3711862 words, keeping 5080 word types\n",
      "2018-12-26 20:52:16,020 : INFO : PROGRESS: at sentence #160000, processed 3957823 words, keeping 5139 word types\n",
      "2018-12-26 20:52:16,068 : INFO : PROGRESS: at sentence #170000, processed 4212294 words, keeping 5194 word types\n",
      "2018-12-26 20:52:16,116 : INFO : PROGRESS: at sentence #180000, processed 4462312 words, keeping 5199 word types\n",
      "2018-12-26 20:52:16,163 : INFO : PROGRESS: at sentence #190000, processed 4708050 words, keeping 5207 word types\n",
      "2018-12-26 20:52:16,210 : INFO : PROGRESS: at sentence #200000, processed 4954533 words, keeping 5217 word types\n",
      "2018-12-26 20:52:16,256 : INFO : PROGRESS: at sentence #210000, processed 5197138 words, keeping 5226 word types\n",
      "2018-12-26 20:52:16,303 : INFO : PROGRESS: at sentence #220000, processed 5440500 words, keeping 5236 word types\n",
      "2018-12-26 20:52:16,350 : INFO : PROGRESS: at sentence #230000, processed 5692502 words, keeping 5250 word types\n",
      "2018-12-26 20:52:16,353 : INFO : collected 5250 word types from a corpus of 5701379 raw words and 230368 sentences\n",
      "2018-12-26 20:52:16,353 : INFO : Loading a fresh vocabulary\n",
      "2018-12-26 20:52:16,358 : INFO : min_count=5 retains 4113 unique words (78% of original 5250, drops 1137)\n",
      "2018-12-26 20:52:16,359 : INFO : min_count=5 leaves 5698971 word corpus (99% of original 5701379, drops 2408)\n",
      "2018-12-26 20:52:16,366 : INFO : deleting the raw counts dictionary of 5250 items\n",
      "2018-12-26 20:52:16,367 : INFO : sample=0.001 downsamples 51 most-common words\n",
      "2018-12-26 20:52:16,368 : INFO : downsampling leaves estimated 4976464 word corpus (87.3% of prior 5698971)\n",
      "2018-12-26 20:52:16,370 : INFO : constructing a huffman tree from 4113 words\n",
      "2018-12-26 20:52:16,428 : INFO : built huffman tree with maximum node depth 20\n",
      "2018-12-26 20:52:16,432 : INFO : estimated required memory for 4113 words and 50 dimensions: 5346900 bytes\n",
      "2018-12-26 20:52:16,433 : INFO : resetting layer weights\n",
      "2018-12-26 20:52:16,465 : INFO : training model with 3 workers on 4113 vocabulary and 50 features, using sg=1 hs=1 sample=0.001 negative=5 window=7\n",
      "2018-12-26 20:52:17,480 : INFO : EPOCH 1 - PROGRESS: at 5.01% examples, 265227 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:18,496 : INFO : EPOCH 1 - PROGRESS: at 10.65% examples, 273355 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:19,518 : INFO : EPOCH 1 - PROGRESS: at 16.28% examples, 273748 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:20,529 : INFO : EPOCH 1 - PROGRESS: at 21.96% examples, 276381 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:21,535 : INFO : EPOCH 1 - PROGRESS: at 27.91% examples, 278157 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:22,546 : INFO : EPOCH 1 - PROGRESS: at 33.58% examples, 278106 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:23,572 : INFO : EPOCH 1 - PROGRESS: at 39.49% examples, 278680 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:24,577 : INFO : EPOCH 1 - PROGRESS: at 45.12% examples, 278882 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:25,598 : INFO : EPOCH 1 - PROGRESS: at 50.85% examples, 278337 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:26,610 : INFO : EPOCH 1 - PROGRESS: at 56.73% examples, 278885 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:27,631 : INFO : EPOCH 1 - PROGRESS: at 62.50% examples, 279117 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:28,636 : INFO : EPOCH 1 - PROGRESS: at 68.17% examples, 278718 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:29,667 : INFO : EPOCH 1 - PROGRESS: at 73.84% examples, 278724 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:30,691 : INFO : EPOCH 1 - PROGRESS: at 79.59% examples, 278778 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:31,721 : INFO : EPOCH 1 - PROGRESS: at 85.23% examples, 278284 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:32,755 : INFO : EPOCH 1 - PROGRESS: at 91.09% examples, 278361 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:33,780 : INFO : EPOCH 1 - PROGRESS: at 96.98% examples, 278427 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:34,243 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:34,301 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:34,312 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:34,312 : INFO : EPOCH - 1 : training on 5701379 raw words (4975743 effective words) took 17.8s, 278812 effective words/s\n",
      "2018-12-26 20:52:35,361 : INFO : EPOCH 2 - PROGRESS: at 5.01% examples, 257024 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:36,383 : INFO : EPOCH 2 - PROGRESS: at 10.65% examples, 268538 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:37,401 : INFO : EPOCH 2 - PROGRESS: at 16.44% examples, 273534 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:38,422 : INFO : EPOCH 2 - PROGRESS: at 22.15% examples, 275577 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:39,427 : INFO : EPOCH 2 - PROGRESS: at 27.91% examples, 275860 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:40,469 : INFO : EPOCH 2 - PROGRESS: at 33.76% examples, 276181 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:41,505 : INFO : EPOCH 2 - PROGRESS: at 39.65% examples, 276653 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:42,530 : INFO : EPOCH 2 - PROGRESS: at 45.48% examples, 277491 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:43,554 : INFO : EPOCH 2 - PROGRESS: at 51.38% examples, 277900 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:44,582 : INFO : EPOCH 2 - PROGRESS: at 57.27% examples, 278094 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:45,593 : INFO : EPOCH 2 - PROGRESS: at 63.03% examples, 278673 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:46,593 : INFO : EPOCH 2 - PROGRESS: at 68.87% examples, 279126 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:47,619 : INFO : EPOCH 2 - PROGRESS: at 74.34% examples, 278548 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:48,633 : INFO : EPOCH 2 - PROGRESS: at 80.12% examples, 278795 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:49,659 : INFO : EPOCH 2 - PROGRESS: at 85.94% examples, 278936 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:50,686 : INFO : EPOCH 2 - PROGRESS: at 91.83% examples, 279033 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:51,692 : INFO : EPOCH 2 - PROGRESS: at 97.61% examples, 279388 words/s, in_qsize 5, out_qsize 0\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:52:52,059 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:52:52,102 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:52:52,113 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:52:52,114 : INFO : EPOCH - 2 : training on 5701379 raw words (4976156 effective words) took 17.8s, 279562 effective words/s\n",
      "2018-12-26 20:52:53,134 : INFO : EPOCH 3 - PROGRESS: at 5.01% examples, 264095 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:54,147 : INFO : EPOCH 3 - PROGRESS: at 10.65% examples, 273185 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:55,166 : INFO : EPOCH 3 - PROGRESS: at 16.44% examples, 276571 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:56,184 : INFO : EPOCH 3 - PROGRESS: at 22.15% examples, 278048 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:57,203 : INFO : EPOCH 3 - PROGRESS: at 28.09% examples, 278838 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:58,207 : INFO : EPOCH 3 - PROGRESS: at 33.76% examples, 278942 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:52:59,218 : INFO : EPOCH 3 - PROGRESS: at 39.49% examples, 278813 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:00,260 : INFO : EPOCH 3 - PROGRESS: at 45.29% examples, 278833 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:01,272 : INFO : EPOCH 3 - PROGRESS: at 51.19% examples, 279483 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:02,300 : INFO : EPOCH 3 - PROGRESS: at 57.09% examples, 279447 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:03,318 : INFO : EPOCH 3 - PROGRESS: at 62.86% examples, 279748 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:04,323 : INFO : EPOCH 3 - PROGRESS: at 68.70% examples, 280044 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:05,347 : INFO : EPOCH 3 - PROGRESS: at 74.34% examples, 280059 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:06,355 : INFO : EPOCH 3 - PROGRESS: at 80.12% examples, 280368 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:07,371 : INFO : EPOCH 3 - PROGRESS: at 85.94% examples, 280570 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:08,399 : INFO : EPOCH 3 - PROGRESS: at 91.83% examples, 280583 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:09,400 : INFO : EPOCH 3 - PROGRESS: at 97.61% examples, 280908 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:09,766 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:53:09,827 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:53:09,833 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:53:09,834 : INFO : EPOCH - 3 : training on 5701379 raw words (4976371 effective words) took 17.7s, 280847 effective words/s\n",
      "2018-12-26 20:53:10,855 : INFO : EPOCH 4 - PROGRESS: at 5.01% examples, 263638 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:11,878 : INFO : EPOCH 4 - PROGRESS: at 10.65% examples, 271749 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:12,899 : INFO : EPOCH 4 - PROGRESS: at 16.44% examples, 275510 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:13,923 : INFO : EPOCH 4 - PROGRESS: at 22.15% examples, 276841 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:14,945 : INFO : EPOCH 4 - PROGRESS: at 28.09% examples, 277751 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:15,980 : INFO : EPOCH 4 - PROGRESS: at 33.94% examples, 278112 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:17,004 : INFO : EPOCH 4 - PROGRESS: at 39.84% examples, 278651 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:18,012 : INFO : EPOCH 4 - PROGRESS: at 45.48% examples, 278796 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:19,030 : INFO : EPOCH 4 - PROGRESS: at 51.38% examples, 279240 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:20,059 : INFO : EPOCH 4 - PROGRESS: at 57.27% examples, 279253 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:21,090 : INFO : EPOCH 4 - PROGRESS: at 63.03% examples, 279245 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:22,098 : INFO : EPOCH 4 - PROGRESS: at 68.87% examples, 279503 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:23,132 : INFO : EPOCH 4 - PROGRESS: at 74.49% examples, 279375 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:24,161 : INFO : EPOCH 4 - PROGRESS: at 80.30% examples, 279286 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:25,201 : INFO : EPOCH 4 - PROGRESS: at 86.11% examples, 279146 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:26,218 : INFO : EPOCH 4 - PROGRESS: at 91.99% examples, 279389 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:27,239 : INFO : EPOCH 4 - PROGRESS: at 97.77% examples, 279471 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:27,551 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:53:27,610 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:53:27,614 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:53:27,615 : INFO : EPOCH - 4 : training on 5701379 raw words (4976357 effective words) took 17.8s, 279900 effective words/s\n",
      "2018-12-26 20:53:28,656 : INFO : EPOCH 5 - PROGRESS: at 5.01% examples, 258673 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:29,691 : INFO : EPOCH 5 - PROGRESS: at 10.65% examples, 267612 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:30,742 : INFO : EPOCH 5 - PROGRESS: at 16.44% examples, 270095 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:31,777 : INFO : EPOCH 5 - PROGRESS: at 22.15% examples, 272097 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:32,808 : INFO : EPOCH 5 - PROGRESS: at 28.09% examples, 273328 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:33,850 : INFO : EPOCH 5 - PROGRESS: at 33.94% examples, 274121 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:34,880 : INFO : EPOCH 5 - PROGRESS: at 39.83% examples, 275022 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:35,905 : INFO : EPOCH 5 - PROGRESS: at 45.48% examples, 275004 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:36,952 : INFO : EPOCH 5 - PROGRESS: at 51.38% examples, 275014 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:37,986 : INFO : EPOCH 5 - PROGRESS: at 57.27% examples, 275313 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:39,023 : INFO : EPOCH 5 - PROGRESS: at 63.03% examples, 275508 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:40,031 : INFO : EPOCH 5 - PROGRESS: at 68.87% examples, 276045 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:41,035 : INFO : EPOCH 5 - PROGRESS: at 74.34% examples, 276154 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:42,066 : INFO : EPOCH 5 - PROGRESS: at 80.12% examples, 276255 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:43,104 : INFO : EPOCH 5 - PROGRESS: at 85.94% examples, 276361 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:44,145 : INFO : EPOCH 5 - PROGRESS: at 91.83% examples, 276407 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:45,169 : INFO : EPOCH 5 - PROGRESS: at 97.61% examples, 276628 words/s, in_qsize 5, out_qsize 0\n",
      "2018-12-26 20:53:45,545 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:53:45,571 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:53:45,585 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:53:45,585 : INFO : EPOCH - 5 : training on 5701379 raw words (4976780 effective words) took 18.0s, 276962 effective words/s\n",
      "2018-12-26 20:53:45,586 : INFO : training on a 28506895 raw words (24881407 effective words) took 89.1s, 279190 effective words/s\n",
      "2018-12-26 20:53:45,587 : INFO : storing 4113x50 projection weights into ../data/wordvec/zh-wordvec-50-skipgram-windowsize7.vec\n"
     ]
    }
   ],
   "source": [
    "# Skip-gram (sg=1) with hierarchical softmax (hs=1), window 7, otherwise\n",
    "# the same 50-dim / min_count=5 setup as the CBOW run.\n",
    "# NOTE(review): the training log reports negative=5 alongside hs=1, i.e.\n",
    "# hierarchical softmax AND negative sampling are both active (gensim\n",
    "# defaults negative to 5) -- confirm this combination is intended.\n",
    "# Rebinding `model` discards the CBOW model trained above.\n",
    "model = word2vec.Word2Vec(sentences, sg=1, hs=1, window=7, size=50, min_count=5)\n",
    "model.wv.save_word2vec_format('../data/wordvec/zh-wordvec-50-skipgram-windowsize7.vec', binary=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Fasttext"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:53:45,690 : WARNING : consider setting layer size to a multiple of 4 for greater performance\n",
      "2018-12-26 20:53:45,690 : INFO : collecting all words and their counts\n",
      "2018-12-26 20:53:45,691 : INFO : PROGRESS: at sentence #0, processed 0 words, keeping 0 word types\n",
      "2018-12-26 20:53:45,739 : INFO : PROGRESS: at sentence #10000, processed 268913 words, keeping 3122 word types\n",
      "2018-12-26 20:53:45,787 : INFO : PROGRESS: at sentence #20000, processed 527375 words, keeping 3764 word types\n",
      "2018-12-26 20:53:45,834 : INFO : PROGRESS: at sentence #30000, processed 776732 words, keeping 4033 word types\n",
      "2018-12-26 20:53:45,881 : INFO : PROGRESS: at sentence #40000, processed 1026248 words, keeping 4173 word types\n",
      "2018-12-26 20:53:45,927 : INFO : PROGRESS: at sentence #50000, processed 1274561 words, keeping 4291 word types\n",
      "2018-12-26 20:53:45,972 : INFO : PROGRESS: at sentence #60000, processed 1509703 words, keeping 4441 word types\n",
      "2018-12-26 20:53:46,018 : INFO : PROGRESS: at sentence #70000, processed 1758393 words, keeping 4532 word types\n",
      "2018-12-26 20:53:46,065 : INFO : PROGRESS: at sentence #80000, processed 2002019 words, keeping 4639 word types\n",
      "2018-12-26 20:53:46,110 : INFO : PROGRESS: at sentence #90000, processed 2243756 words, keeping 4717 word types\n",
      "2018-12-26 20:53:46,157 : INFO : PROGRESS: at sentence #100000, processed 2490952 words, keeping 4807 word types\n",
      "2018-12-26 20:53:46,203 : INFO : PROGRESS: at sentence #110000, processed 2731991 words, keeping 4863 word types\n",
      "2018-12-26 20:53:46,250 : INFO : PROGRESS: at sentence #120000, processed 2977027 words, keeping 4912 word types\n",
      "2018-12-26 20:53:46,296 : INFO : PROGRESS: at sentence #130000, processed 3219066 words, keeping 4977 word types\n",
      "2018-12-26 20:53:46,343 : INFO : PROGRESS: at sentence #140000, processed 3467501 words, keeping 5026 word types\n",
      "2018-12-26 20:53:46,388 : INFO : PROGRESS: at sentence #150000, processed 3711862 words, keeping 5080 word types\n",
      "2018-12-26 20:53:46,434 : INFO : PROGRESS: at sentence #160000, processed 3957823 words, keeping 5139 word types\n",
      "2018-12-26 20:53:46,482 : INFO : PROGRESS: at sentence #170000, processed 4212294 words, keeping 5194 word types\n",
      "2018-12-26 20:53:46,529 : INFO : PROGRESS: at sentence #180000, processed 4462312 words, keeping 5199 word types\n",
      "2018-12-26 20:53:46,575 : INFO : PROGRESS: at sentence #190000, processed 4708050 words, keeping 5207 word types\n",
      "2018-12-26 20:53:46,622 : INFO : PROGRESS: at sentence #200000, processed 4954533 words, keeping 5217 word types\n",
      "2018-12-26 20:53:46,668 : INFO : PROGRESS: at sentence #210000, processed 5197138 words, keeping 5226 word types\n",
      "2018-12-26 20:53:46,714 : INFO : PROGRESS: at sentence #220000, processed 5440500 words, keeping 5236 word types\n",
      "2018-12-26 20:53:46,760 : INFO : PROGRESS: at sentence #230000, processed 5692502 words, keeping 5250 word types\n",
      "2018-12-26 20:53:46,763 : INFO : collected 5250 word types from a corpus of 5701379 raw words and 230368 sentences\n",
      "2018-12-26 20:53:46,763 : INFO : Loading a fresh vocabulary\n",
      "2018-12-26 20:53:46,767 : INFO : min_count=5 retains 4113 unique words (78% of original 5250, drops 1137)\n",
      "2018-12-26 20:53:46,768 : INFO : min_count=5 leaves 5698971 word corpus (99% of original 5701379, drops 2408)\n",
      "2018-12-26 20:53:46,776 : INFO : deleting the raw counts dictionary of 5250 items\n",
      "2018-12-26 20:53:46,776 : INFO : sample=0.001 downsamples 51 most-common words\n",
      "2018-12-26 20:53:46,777 : INFO : downsampling leaves estimated 4976464 word corpus (87.3% of prior 5698971)\n",
      "2018-12-26 20:53:46,807 : INFO : estimated required memory for 4113 words, 4091 buckets and 50 dimensions: 4750228 bytes\n",
      "2018-12-26 20:53:46,808 : INFO : resetting layer weights\n",
      "2018-12-26 20:53:46,959 : INFO : Total number of ngrams is 4091\n",
      "2018-12-26 20:53:47,019 : INFO : training model with 3 workers on 4113 vocabulary and 50 features, using sg=0 hs=0 sample=0.001 negative=5 window=3\n",
      "2018-12-26 20:53:48,021 : INFO : EPOCH 1 - PROGRESS: at 15.25% examples, 782601 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:49,021 : INFO : EPOCH 1 - PROGRESS: at 31.27% examples, 787718 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:50,026 : INFO : EPOCH 1 - PROGRESS: at 47.30% examples, 787408 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:51,033 : INFO : EPOCH 1 - PROGRESS: at 63.39% examples, 787612 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:52,040 : INFO : EPOCH 1 - PROGRESS: at 79.07% examples, 784954 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:53,051 : INFO : EPOCH 1 - PROGRESS: at 95.01% examples, 783639 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:53,355 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:53:53,356 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:53:53,364 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:53:53,365 : INFO : EPOCH - 1 : training on 5701379 raw words (4977454 effective words) took 6.3s, 784540 effective words/s\n",
      "2018-12-26 20:53:54,376 : INFO : EPOCH 2 - PROGRESS: at 15.62% examples, 793198 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:55,382 : INFO : EPOCH 2 - PROGRESS: at 31.79% examples, 794513 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:56,391 : INFO : EPOCH 2 - PROGRESS: at 47.84% examples, 790997 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:57,398 : INFO : EPOCH 2 - PROGRESS: at 63.92% examples, 790094 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:58,407 : INFO : EPOCH 2 - PROGRESS: at 79.41% examples, 784873 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:59,417 : INFO : EPOCH 2 - PROGRESS: at 95.19% examples, 782336 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:53:59,707 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:53:59,708 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:53:59,716 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:53:59,716 : INFO : EPOCH - 2 : training on 5701379 raw words (4976351 effective words) took 6.4s, 783620 effective words/s\n",
      "2018-12-26 20:54:00,725 : INFO : EPOCH 3 - PROGRESS: at 15.79% examples, 803726 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:01,726 : INFO : EPOCH 3 - PROGRESS: at 31.79% examples, 797726 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:02,737 : INFO : EPOCH 3 - PROGRESS: at 47.84% examples, 792552 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:03,742 : INFO : EPOCH 3 - PROGRESS: at 63.75% examples, 789441 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:04,751 : INFO : EPOCH 3 - PROGRESS: at 79.59% examples, 787980 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:05,762 : INFO : EPOCH 3 - PROGRESS: at 94.30% examples, 775949 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:06,118 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:54:06,120 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:54:06,128 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:54:06,128 : INFO : EPOCH - 3 : training on 5701379 raw words (4976372 effective words) took 6.4s, 776286 effective words/s\n",
      "2018-12-26 20:54:07,133 : INFO : EPOCH 4 - PROGRESS: at 15.43% examples, 789009 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:08,138 : INFO : EPOCH 4 - PROGRESS: at 31.44% examples, 788704 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:09,141 : INFO : EPOCH 4 - PROGRESS: at 47.48% examples, 788853 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:10,150 : INFO : EPOCH 4 - PROGRESS: at 63.75% examples, 790364 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:11,151 : INFO : EPOCH 4 - PROGRESS: at 79.59% examples, 789780 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:12,162 : INFO : EPOCH 4 - PROGRESS: at 95.74% examples, 789273 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:12,419 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:54:12,420 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:54:12,428 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:54:12,428 : INFO : EPOCH - 4 : training on 5701379 raw words (4976758 effective words) took 6.3s, 790094 effective words/s\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:54:13,430 : INFO : EPOCH 5 - PROGRESS: at 15.43% examples, 791455 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:14,432 : INFO : EPOCH 5 - PROGRESS: at 31.08% examples, 782357 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:15,438 : INFO : EPOCH 5 - PROGRESS: at 46.93% examples, 780850 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:16,440 : INFO : EPOCH 5 - PROGRESS: at 62.68% examples, 779129 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:17,441 : INFO : EPOCH 5 - PROGRESS: at 78.36% examples, 779186 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:18,442 : INFO : EPOCH 5 - PROGRESS: at 93.57% examples, 774114 words/s, in_qsize 0, out_qsize 0\n",
      "2018-12-26 20:54:18,838 : INFO : worker thread finished; awaiting finish of 2 more threads\n",
      "2018-12-26 20:54:18,839 : INFO : worker thread finished; awaiting finish of 1 more threads\n",
      "2018-12-26 20:54:18,848 : INFO : worker thread finished; awaiting finish of 0 more threads\n",
      "2018-12-26 20:54:18,848 : INFO : EPOCH - 5 : training on 5701379 raw words (4976648 effective words) took 6.4s, 775349 effective words/s\n",
      "2018-12-26 20:54:18,849 : INFO : training on a 28506895 raw words (24883583 effective words) took 31.8s, 781780 effective words/s\n"
     ]
    }
   ],
   "source": [
    "from gensim.models import FastText\n",
    "\n",
    "# Train character-level FastText embeddings on the corpus.\n",
    "# NOTE: gensim 3.x API — 'size' is the embedding dimension (renamed\n",
    "# 'vector_size' in gensim 4).\n",
    "fasttext_model = FastText(\n",
    "    sentences,    # iterable of tokenised sentences — presumably built from corpus.txt in an earlier cell; confirm\n",
    "    size=50,      # 50-dimensional vectors\n",
    "    window=3,     # context window of 3 tokens (characters here)\n",
    "    min_count=5,  # drop tokens seen fewer than 5 times\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2018-12-26 20:54:18,875 : INFO : storing 4113x50 projection weights into ../data/wordvec/fasttext-50-win3.vec\n"
     ]
    }
   ],
   "source": [
    "# Persist the trained vectors in plain-text word2vec format\n",
    "# (binary=False) for downstream loading.\n",
    "vec_path = '../data/wordvec/fasttext-50-win3.vec'\n",
    "fasttext_model.wv.save_word2vec_format(vec_path, binary=False)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
