{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "import argparse\n",
    "import math\n",
    "import struct\n",
    "import sys\n",
    "import time\n",
    "import warnings\n",
    "\n",
    "import numpy as np\n",
    "\n",
    "from multiprocessing import Pool, Value, Array\n",
    "\n",
    "class VocabItem:\n",
    "    \"\"\"One vocabulary entry: a token plus its training-corpus statistics.\"\"\"\n",
    "    def __init__(self, word):\n",
    "        self.word = word\n",
    "        self.count = 0   # Number of occurrences in the training file\n",
    "        self.path = None # Path (list of indices) from the root to the word (leaf)\n",
    "        self.code = None # Huffman encoding\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Vocab:\n",
    "    \"\"\"Vocabulary read from a whitespace-tokenized training file.\n",
    "\n",
    "    Counts every token, adds one <bol>/<eol> sentinel pair per line,\n",
    "    merges words occurring fewer than ``min_count`` times into <unk>,\n",
    "    and sorts entries in descending order of frequency.\n",
    "    \"\"\"\n",
    "    def __init__(self, fi, min_count):\n",
    "        vocab_items = []\n",
    "        vocab_hash = {}\n",
    "        word_count = 0\n",
    "\n",
    "        # Add special tokens <bol> (beginning of line) and <eol> (end of line)\n",
    "        for token in ['<bol>', '<eol>']:\n",
    "            vocab_hash[token] = len(vocab_items)\n",
    "            vocab_items.append(VocabItem(token))\n",
    "\n",
    "        # Context manager guarantees the handle is closed even on error\n",
    "        # (the original implementation leaked the open file).\n",
    "        with open(fi, \"r\") as f:\n",
    "            for line in f:\n",
    "                tokens = line.split()\n",
    "                for token in tokens:\n",
    "                    if token not in vocab_hash:\n",
    "                        vocab_hash[token] = len(vocab_items)\n",
    "                        vocab_items.append(VocabItem(token))\n",
    "\n",
    "                    #assert vocab_items[vocab_hash[token]].word == token, 'Wrong vocab_hash index'\n",
    "                    vocab_items[vocab_hash[token]].count += 1\n",
    "                    word_count += 1\n",
    "\n",
    "                    # Periodic progress report while scanning the corpus\n",
    "                    if word_count % 10000 == 0:\n",
    "                        sys.stdout.write(\"\\rReading word %d\" % word_count)\n",
    "                        sys.stdout.flush()\n",
    "\n",
    "                # Count one <bol> and one <eol> for every line read\n",
    "                vocab_items[vocab_hash['<bol>']].count += 1\n",
    "                vocab_items[vocab_hash['<eol>']].count += 1\n",
    "                word_count += 2\n",
    "\n",
    "            self.bytes = f.tell()          # Training file size in bytes\n",
    "\n",
    "        self.vocab_items = vocab_items         # List of VocabItem objects\n",
    "        self.vocab_hash = vocab_hash           # Mapping from each token to its index in vocab\n",
    "        self.word_count = word_count           # Total number of words in train file\n",
    "\n",
    "        # Add special token <unk> (unknown),\n",
    "        # merge words occurring less than min_count into <unk>, and\n",
    "        # sort vocab in descending order by frequency in train file\n",
    "        self.__sort(min_count)\n",
    "\n",
    "        #assert self.word_count == sum([t.count for t in self.vocab_items]), 'word_count and sum of t.count do not agree'\n",
    "        print('Total words in training file: %d' % self.word_count)\n",
    "        print('Total bytes in training file: %d' % self.bytes)\n",
    "        print('Vocab size: %d' % len(self))\n",
    "\n",
    "    def __getitem__(self, i):\n",
    "        return self.vocab_items[i]\n",
    "\n",
    "    def __len__(self):\n",
    "        return len(self.vocab_items)\n",
    "\n",
    "    def __iter__(self):\n",
    "        return iter(self.vocab_items)\n",
    "\n",
    "    def __contains__(self, key):\n",
    "        return key in self.vocab_hash\n",
    "\n",
    "    def __sort(self, min_count):\n",
    "        \"\"\"Merge words with count < min_count into <unk>; sort by frequency.\"\"\"\n",
    "        tmp = []\n",
    "        tmp.append(VocabItem('<unk>'))\n",
    "        unk_hash = 0\n",
    "\n",
    "        count_unk = 0\n",
    "        for token in self.vocab_items:\n",
    "            if token.count < min_count:\n",
    "                count_unk += 1\n",
    "                tmp[unk_hash].count += token.count\n",
    "            else:\n",
    "                tmp.append(token)\n",
    "\n",
    "        tmp.sort(key=lambda token : token.count, reverse=True)\n",
    "\n",
    "        # Update vocab_hash\n",
    "        vocab_hash = {}\n",
    "        for i, token in enumerate(tmp):\n",
    "            vocab_hash[token.word] = i\n",
    "\n",
    "        self.vocab_items = tmp\n",
    "        self.vocab_hash = vocab_hash\n",
    "\n",
    "        print()\n",
    "        print('Unknown vocab size:', count_unk)\n",
    "\n",
    "    def indices(self, tokens):\n",
    "        \"\"\"Map tokens to vocab indices, falling back to <unk> for OOV words.\"\"\"\n",
    "        return [self.vocab_hash[token] if token in self else self.vocab_hash['<unk>'] for token in tokens]\n",
    "\n",
    "    def encode_huffman(self):\n",
    "        \"\"\"Build a Huffman tree and assign each word its path and binary code.\"\"\"\n",
    "        # Build a Huffman tree\n",
    "        vocab_size = len(self)\n",
    "        count = [t.count for t in self] + [1e15] * (vocab_size - 1)\n",
    "        parent = [0] * (2 * vocab_size - 2)\n",
    "        binary = [0] * (2 * vocab_size - 2)\n",
    "\n",
    "        pos1 = vocab_size - 1\n",
    "        pos2 = vocab_size\n",
    "\n",
    "        # BUGFIX: xrange does not exist in Python 3; use range\n",
    "        for i in range(vocab_size - 1):\n",
    "            # Find min1\n",
    "            if pos1 >= 0:\n",
    "                if count[pos1] < count[pos2]:\n",
    "                    min1 = pos1\n",
    "                    pos1 -= 1\n",
    "                else:\n",
    "                    min1 = pos2\n",
    "                    pos2 += 1\n",
    "            else:\n",
    "                min1 = pos2\n",
    "                pos2 += 1\n",
    "\n",
    "            # Find min2\n",
    "            if pos1 >= 0:\n",
    "                if count[pos1] < count[pos2]:\n",
    "                    min2 = pos1\n",
    "                    pos1 -= 1\n",
    "                else:\n",
    "                    min2 = pos2\n",
    "                    pos2 += 1\n",
    "            else:\n",
    "                min2 = pos2\n",
    "                pos2 += 1\n",
    "\n",
    "            count[vocab_size + i] = count[min1] + count[min2]\n",
    "            parent[min1] = vocab_size + i\n",
    "            parent[min2] = vocab_size + i\n",
    "            binary[min2] = 1\n",
    "\n",
    "        # Assign binary code and path pointers to each vocab word\n",
    "        root_idx = 2 * vocab_size - 2\n",
    "        for i, token in enumerate(self):\n",
    "            path = [] # List of indices from the leaf to the root\n",
    "            code = [] # Binary Huffman encoding from the leaf to the root\n",
    "\n",
    "            node_idx = i\n",
    "            while node_idx < root_idx:\n",
    "                if node_idx >= vocab_size: path.append(node_idx)\n",
    "                code.append(binary[node_idx])\n",
    "                node_idx = parent[node_idx]\n",
    "            path.append(root_idx)\n",
    "\n",
    "            # These are path and code from the root to the leaf\n",
    "            token.path = [j - vocab_size for j in path[::-1]]\n",
    "            token.code = code[::-1]\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "class UnigramTable:\n",
    "    \"\"\"\n",
    "    A list of indices of tokens in the vocab following a power law distribution,\n",
    "    used to draw negative samples.\n",
    "    \"\"\"\n",
    "    def __init__(self, vocab):\n",
    "        vocab_size = len(vocab)\n",
    "        power = 0.75\n",
    "        norm = sum([math.pow(t.count, power) for t in vocab]) # Normalizing constant\n",
    "\n",
    "        table_size = int(1e8) # Length of the unigram table\n",
    "        table = np.zeros(table_size, dtype=np.uint32)\n",
    "\n",
    "        print('Filling unigram table')\n",
    "        p = 0 # Cumulative probability\n",
    "        i = 0\n",
    "        # Token j occupies a stretch of the table proportional to\n",
    "        # count(j)**power / norm, so uniform draws over table slots sample\n",
    "        # tokens from the smoothed unigram distribution.\n",
    "        # NOTE(review): if float accumulation leaves p slightly below 1.0,\n",
    "        # any unfilled tail slots keep their initial value 0 (the index of\n",
    "        # the first/most frequent token) -- confirm this bias is acceptable.\n",
    "        for j, unigram in enumerate(vocab):\n",
    "            p += float(math.pow(unigram.count, power))/norm\n",
    "            while i < table_size and float(i) / table_size < p:\n",
    "                table[i] = j\n",
    "                i += 1\n",
    "        self.table = table\n",
    "\n",
    "    def sample(self, count):\n",
    "        # Draw `count` vocab indices uniformly over the table slots\n",
    "        indices = np.random.randint(low=0, high=len(self.table), size=count)\n",
    "        return [self.table[i] for i in indices]\n",
    "\n",
    "def sigmoid(z):\n",
    "    \"\"\"Logistic function 1/(1+e^-z), clamped to 0.0/1.0 outside [-6, 6].\"\"\"\n",
    "    if z < -6:\n",
    "        return 0.0\n",
    "    if z > 6:\n",
    "        return 1.0\n",
    "    return 1 / (1 + math.exp(-z))\n",
    "\n",
    "def init_net(dim, vocab_size):\n",
    "    \"\"\"Create the two (vocab_size, dim) weight matrices as shared arrays.\n",
    "\n",
    "    Returns (syn0, syn1): multiprocessing.Array objects (lock=False, so no\n",
    "    synchronization) that worker processes view as numpy arrays and update\n",
    "    in place.\n",
    "    \"\"\"\n",
    "    # Init syn0 with random numbers from a uniform distribution on the interval [-0.5, 0.5]/dim\n",
    "    tmp = np.random.uniform(low=-0.5/dim, high=0.5/dim, size=(vocab_size, dim))\n",
    "    syn0 = np.ctypeslib.as_ctypes(tmp)\n",
    "    syn0 = Array(syn0._type_, syn0, lock=False)\n",
    "\n",
    "    # Init syn1 with zeros\n",
    "    tmp = np.zeros(shape=(vocab_size, dim))\n",
    "    syn1 = np.ctypeslib.as_ctypes(tmp)\n",
    "    syn1 = Array(syn1._type_, syn1, lock=False)\n",
    "\n",
    "    return (syn0, syn1)\n",
    "\n",
    "def train_process(pid):\n",
    "    \"\"\"Worker: train on the pid-th byte chunk of the training file.\n",
    "\n",
    "    Relies on the globals installed by __init_process (vocab, syn0, syn1,\n",
    "    table, cbow, neg, dim, starting_alpha, win, num_processes,\n",
    "    global_word_count, fi).\n",
    "    \"\"\"\n",
    "    # Set fi to point to the right chunk of training file.\n",
    "    # BUGFIX: use floor division -- '/' yields a float in Python 3 and\n",
    "    # file.seek() rejects float offsets.\n",
    "    start = vocab.bytes // num_processes * pid\n",
    "    end = vocab.bytes if pid == num_processes - 1 else vocab.bytes // num_processes * (pid + 1)\n",
    "    fi.seek(start)\n",
    "\n",
    "    alpha = starting_alpha\n",
    "\n",
    "    word_count = 0\n",
    "    last_word_count = 0\n",
    "\n",
    "    while fi.tell() < end:\n",
    "        line = fi.readline().strip()\n",
    "        # Skip blank lines\n",
    "        if not line:\n",
    "            continue\n",
    "\n",
    "        # Init sent, a list of indices of words in line\n",
    "        sent = vocab.indices(['<bol>'] + line.split() + ['<eol>'])\n",
    "\n",
    "        for sent_pos, token in enumerate(sent):\n",
    "            if word_count % 10000 == 0:\n",
    "                global_word_count.value += (word_count - last_word_count)\n",
    "                last_word_count = word_count\n",
    "\n",
    "                # Recalculate alpha: linear decay floored at 0.01% of start\n",
    "                alpha = starting_alpha * (1 - float(global_word_count.value) / vocab.word_count)\n",
    "                if alpha < starting_alpha * 0.0001: alpha = starting_alpha * 0.0001\n",
    "\n",
    "                # Print progress info\n",
    "                sys.stdout.write(\"\\rAlpha: %f Progress: %d of %d (%.2f%%)\" %\n",
    "                                 (alpha, global_word_count.value, vocab.word_count,\n",
    "                                  float(global_word_count.value) / vocab.word_count * 100))\n",
    "                sys.stdout.flush()\n",
    "\n",
    "            # Randomize window size, where win is the max window size\n",
    "            current_win = np.random.randint(low=1, high=win+1)\n",
    "            context_start = max(sent_pos - current_win, 0)\n",
    "            context_end = min(sent_pos + current_win + 1, len(sent))\n",
    "            context = sent[context_start:sent_pos] + sent[sent_pos+1:context_end] # Turn into an iterator?\n",
    "\n",
    "            # CBOW\n",
    "            if cbow:\n",
    "                # Compute neu1: mean of the context word vectors\n",
    "                neu1 = np.mean(np.array([syn0[c] for c in context]), axis=0)\n",
    "                assert len(neu1) == dim, 'neu1 and dim do not agree'\n",
    "\n",
    "                # Init neu1e with zeros\n",
    "                neu1e = np.zeros(dim)\n",
    "\n",
    "                # Compute neu1e and update syn1: with neg > 0 classify the\n",
    "                # true token against `neg` noise draws; otherwise walk the\n",
    "                # Huffman (path, code) pairs.\n",
    "                if neg > 0:\n",
    "                    classifiers = [(token, 1)] + [(target, 0) for target in table.sample(neg)]\n",
    "                else:\n",
    "                    classifiers = zip(vocab[token].path, vocab[token].code)\n",
    "                for target, label in classifiers:\n",
    "                    z = np.dot(neu1, syn1[target])\n",
    "                    p = sigmoid(z)\n",
    "                    g = alpha * (label - p)\n",
    "                    neu1e += g * syn1[target] # Error to backpropagate to syn0\n",
    "                    syn1[target] += g * neu1  # Update syn1\n",
    "\n",
    "                # Update syn0\n",
    "                for context_word in context:\n",
    "                    syn0[context_word] += neu1e\n",
    "\n",
    "            # Skip-gram\n",
    "            else:\n",
    "                for context_word in context:\n",
    "                    # Init neu1e with zeros\n",
    "                    neu1e = np.zeros(dim)\n",
    "\n",
    "                    # Compute neu1e and update syn1\n",
    "                    if neg > 0:\n",
    "                        classifiers = [(token, 1)] + [(target, 0) for target in table.sample(neg)]\n",
    "                    else:\n",
    "                        classifiers = zip(vocab[token].path, vocab[token].code)\n",
    "                    for target, label in classifiers:\n",
    "                        z = np.dot(syn0[context_word], syn1[target])\n",
    "                        p = sigmoid(z)\n",
    "                        g = alpha * (label - p)\n",
    "                        neu1e += g * syn1[target]              # Error to backpropagate to syn0\n",
    "                        syn1[target] += g * syn0[context_word] # Update syn1\n",
    "\n",
    "                    # Update syn0\n",
    "                    syn0[context_word] += neu1e\n",
    "\n",
    "            word_count += 1\n",
    "\n",
    "    # Print progress info\n",
    "    global_word_count.value += (word_count - last_word_count)\n",
    "    sys.stdout.write(\"\\rAlpha: %f Progress: %d of %d (%.2f%%)\" %\n",
    "                     (alpha, global_word_count.value, vocab.word_count,\n",
    "                      float(global_word_count.value)/vocab.word_count * 100))\n",
    "    sys.stdout.flush()\n",
    "    fi.close()\n",
    "\n",
    "def save(vocab, syn0, fo, binary):\n",
    "    \"\"\"Save the model to path fo: one '<word> <vector>' record per token.\n",
    "\n",
    "    binary=True writes a header line followed by '<word> ' and dim packed\n",
    "    32-bit floats (native byte order) per token; otherwise plain text.\n",
    "    \"\"\"\n",
    "    print('Saving model to', fo)\n",
    "    dim = len(syn0[0])\n",
    "    if binary:\n",
    "        fo = open(fo, 'wb')\n",
    "        # BUGFIX: the handle is binary, so text must be encoded first --\n",
    "        # writing str to a 'wb' file raises TypeError in Python 3.\n",
    "        fo.write(('%d %d\\n' % (len(syn0), dim)).encode('utf-8'))\n",
    "        fo.write(b'\\n')\n",
    "        for token, vector in zip(vocab, syn0):\n",
    "            fo.write(('%s ' % token.word).encode('utf-8'))\n",
    "            for s in vector:\n",
    "                fo.write(struct.pack('f', s))\n",
    "            fo.write(b'\\n')\n",
    "    else:\n",
    "        fo = open(fo, 'w')\n",
    "        fo.write('%d %d\\n' % (len(syn0), dim))\n",
    "        for token, vector in zip(vocab, syn0):\n",
    "            word = token.word\n",
    "            vector_str = ' '.join([str(s) for s in vector])\n",
    "            fo.write('%s %s\\n' % (word, vector_str))\n",
    "\n",
    "    fo.close()\n",
    "\n",
    "def __init_process(*args):\n",
    "    \"\"\"Pool initializer: publish shared training state as worker globals.\n",
    "\n",
    "    args = (vocab, syn0, syn1, table, cbow, neg, dim, starting_alpha,\n",
    "    win, num_processes, global_word_count, train_file_path); the last\n",
    "    element is a path, opened per worker as the global fi.\n",
    "    \"\"\"\n",
    "    global vocab, syn0, syn1, table, cbow, neg, dim, starting_alpha\n",
    "    global win, num_processes, global_word_count, fi\n",
    "    \n",
    "    vocab, syn0_tmp, syn1_tmp, table, cbow, neg, dim, starting_alpha, win, num_processes, global_word_count = args[:-1]\n",
    "    fi = open(args[-1], 'r')\n",
    "    with warnings.catch_warnings():\n",
    "        warnings.simplefilter('ignore', RuntimeWarning)\n",
    "        # View the shared ctypes buffers as numpy arrays (no copy)\n",
    "        syn0 = np.ctypeslib.as_array(syn0_tmp)\n",
    "        syn1 = np.ctypeslib.as_array(syn1_tmp)\n",
    "\n",
    "def train(fi, fo, cbow, neg, dim, alpha, win, min_count, num_processes, binary):\n",
    "    \"\"\"End-to-end training driver.\n",
    "\n",
    "    Builds the vocab from fi, initializes the shared network, trains with\n",
    "    num_processes workers, and saves the embeddings to fo.\n",
    "    \"\"\"\n",
    "    # Read train file to init vocab\n",
    "    vocab = Vocab(fi, min_count)\n",
    "\n",
    "    # Init net\n",
    "    syn0, syn1 = init_net(dim, len(vocab))\n",
    "\n",
    "    global_word_count = Value('i', 0)\n",
    "    table = None\n",
    "    if neg > 0:\n",
    "        print('Initializing unigram table')\n",
    "        table = UnigramTable(vocab)\n",
    "    else:\n",
    "        print('Initializing Huffman tree')\n",
    "        vocab.encode_huffman()\n",
    "\n",
    "    # Begin training using num_processes workers\n",
    "    t0 = time.time()\n",
    "    pool = Pool(processes=num_processes, initializer=__init_process,\n",
    "                initargs=(vocab, syn0, syn1, table, cbow, neg, dim, alpha,\n",
    "                          win, num_processes, global_word_count, fi))\n",
    "    pool.map(train_process, range(num_processes))\n",
    "    pool.close()\n",
    "    pool.join()\n",
    "    t1 = time.time()\n",
    "    # BUGFIX: a bare 'print' is a no-op expression in Python 3; call it to\n",
    "    # emit the newline that terminates the \\r progress line.\n",
    "    print()\n",
    "    print('Completed training. Training took', (t1 - t0) / 60, 'minutes')\n",
    "\n",
    "    # Save model to file\n",
    "    save(vocab, syn0, fo, binary)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Unknown vocab size: 0\n",
      "Total words in training file: 229\n",
      "Total bytes in training file: 1169\n",
      "Vocab size: 133\n",
      "Initializing unigram table\n",
      "Filling unigram table\n",
      "Alpha: 0.025000 Progress: 229 of 229 (100.00%)Completed training. Training took 0.0009432673454284668 minutes\n",
      "Saving model to testw2v.model\n"
     ]
    }
   ],
   "source": [
    "\n",
    "\n",
    "if __name__ == '__main__':\n",
    "#     parser = argparse.ArgumentParser()\n",
    "#     parser.add_argument('-train', help='Training file', dest='fi', required=True)\n",
    "#     parser.add_argument('-model', help='Output model file', dest='fo', required=True)\n",
    "#     parser.add_argument('-cbow', help='1 for CBOW, 0 for skip-gram', dest='cbow', default=1, type=int)\n",
    "#     parser.add_argument('-negative', help='Number of negative examples (>0) for negative sampling, 0 for hierarchical softmax', dest='neg', default=5, type=int)\n",
    "#     parser.add_argument('-dim', help='Dimensionality of word embeddings', dest='dim', default=100, type=int)\n",
    "#     parser.add_argument('-alpha', help='Starting alpha', dest='alpha', default=0.025, type=float)\n",
    "#     parser.add_argument('-window', help='Max window length', dest='win', default=5, type=int) \n",
    "#     parser.add_argument('-min-count', help='Min count for words used to learn <unk>', dest='min_count', default=5, type=int)\n",
    "#     parser.add_argument('-processes', help='Number of processes', dest='num_processes', default=1, type=int)\n",
    "#     parser.add_argument('-binary', help='1 for output model in binary format, 0 otherwise', dest='binary', default=0, type=int)\n",
    "#     #TO DO: parser.add_argument('-epoch', help='Number of training epochs', dest='epoch', default=1, type=int)\n",
    "#     args = parser.parse_args()\n",
    "\n",
    "#     train(args.fi, args.fo, bool(args.cbow), args.neg, args.dim, args.alpha, args.win,\n",
    "#           args.min_count, args.num_processes, bool(args.binary))\n",
    "    \n",
    "    train(\"testw2v.txt\", \"testw2v.model\", True, 5, 10, 0.025, 5,0, 1, 0)\n",
    "    \n",
    "    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "133 10\n",
      "\n",
      "<bol> -0.04911304804430818 0.021173916592996625 -0.020215983063805 0.03609052031585972 0.04780683032348061 -0.005098238743782445 -0.007392175366593182 -0.03581089268988043 -0.03960026551219543 0.0149095064065013\n",
      "\n",
      "<eol> 0.03755589244673203 -0.017849942225721522 -0.04217883686934874 0.0446033019523486 -0.0450780780350801 0.027391754210197943 -0.033991163711219456 -0.013891458886650181 -0.03410586562939282 0.029405777817388675\n",
      "\n",
      "the -0.02426557239366176 -0.007429374033589544 0.04515504050232456 -0.022502968762038954 0.04004228112450869 -0.03860454876297045 -0.027817048630592364 -0.004719411553270261 -0.012142895627710811 -0.005826760328651483\n",
      "\n",
      "I -0.02893797847372474 -0.006904844093047474 0.0022589891726591373 -0.006963443808911125 0.013643648331186748 -0.04175677485749366 -0.04049094715858837 -0.005420378456920921 0.026514104467240323 -0.02702022785136164\n",
      "\n",
      "on 0.04170862507266213 0.031075427487023573 -0.017716549056279625 -0.0028436139504810476 -0.04252062118154836 0.012765269866559851 0.0005005380102270737 -0.03969368867973959 0.020571343395473073 -0.03750344304944872\n",
      "\n",
      "and -0.02712776246397476 -0.0006067711533314206 -0.033163750307540686 0.007745961401542428 0.017109631397541656 0.028759091298080984 -0.007003282755088418 0.040654292684022264 -0.04438161026351525 -0.015941352949475858\n",
      "\n",
      "a 0.04421103449521173 0.013169955635067024 -0.024104802204606795 -0.0283296496399505 -0.038455271708887837 0.03976478012882877 -0.037111929336102564 0.007107063124451226 0.04542387352356695 0.02984165331599978\n",
      "\n",
      "of 0.043154396396513126 -0.046013609259001734 0.044344306185930896 -0.028080432535887528 0.012461112914680501 0.014854343667325066 0.031193696835487436 0.04564495474041235 -0.046697681149292215 0.030844217360212464\n",
      "\n",
      "come -0.025191595433209344 0.0486772136943915 0.0376690176105452 -0.010805963738528336 0.03429125364190626 -0.009096034817546826 0.03360154653639587 0.03698039400957878 0.009369340585685651 0.047623373299495654\n",
      "\n",
      "like 0.040411820542036486 0.01599896673497462 -0.04259377211007594 -0.04144935234720328 0.024681155711242467 -0.04693845367520298 -0.03319831404604272 0.005169003872727798 0.013127478846414273 -0.02902071446769521\n",
      "\n",
      "in -0.02688937579862642 -0.04397520081714664 0.04738383825085115 -0.049372687769053215 0.0076719379867733585 0.047171021873582686 -0.021720674181078637 -0.025988560801837896 -0.03997030220467298 -0.04193538317724458\n",
      "\n",
      "to 0.0219581339609507 0.001624498460513359 0.04487637493956598 0.015886558477834344 0.01130398513177299 0.04793854323479949 -0.0268638343703869 -0.008403897732003542 -0.02138825668957409 0.039912467627327405\n",
      "\n",
      "he 0.0026524673894955113 0.03347685296871146 -0.012446073922500741 -0.03574538971129639 -0.03437618386699402 0.015232020807760805 0.005218173348783625 -0.03133302483482633 -0.028677399602645096 0.04176364006177562\n",
      "\n",
      "curtain -0.037352132039600514 0.009650064949243656 -0.018410195971438052 -0.011102658773981762 0.03056990689484075 -0.03813308872735157 0.04725718160828223 -0.01280395102247259 -0.010386880681885565 -0.0468552415835528\n",
      "\n",
      "his 0.02464679833994608 -0.00452079284945886 0.012083522125968815 -0.012574361966519567 0.015817102310389478 -0.027962723764097246 -0.02187800293555447 -0.02055317757130499 -0.006912683513898426 -0.027132655331650848\n",
      "\n",
      "can't 0.016459166669846294 -0.037431560777772946 -0.0024284211268438445 0.029074088432533337 -0.0241744247207735 0.006433571584586496 0.016644773454020673 -0.014319790994285141 -0.03678147077464413 -0.029646363377297466\n",
      "\n",
      "bar -0.01498327688065046 -0.002074754616591374 0.04724163911795077 0.05033306383509784 0.014408871023115468 0.02650996899972642 -0.0390333999811642 0.02351549701762805 -0.046600904920761146 -0.021047588630478557\n",
      "\n",
      "一曲新词酒一杯， 0.03636041031816216 -0.046800273201769445 -0.016893542440382586 -0.0008846477024166717 0.017148878780352452 0.031089196681785652 -0.041895282809116995 -0.019923849423336658 0.030512341939191258 -0.018431717530662046\n",
      "\n",
      "compose 0.019618504082611565 -0.01749914846002616 -0.04906237147191615 -0.014426101781010336 -0.021581353664322202 -0.03568312104614973 0.0029095425500515015 -0.03850270758154808 0.004380707932595333 -0.03514370593148741\n",
      "\n",
      "new 0.010892956784392402 0.02611706703771217 0.047287472725992166 -0.015580782501320025 0.005230208607322435 0.006012141547999047 0.00594903909575538 0.04725278712798974 -0.026251028110459713 0.012933463907553221\n",
      "\n",
      "song 0.04915562844281035 0.0015723817421361347 0.04179667054554253 -0.03450374608788189 -0.00640169699447322 0.022101900561904815 0.03694448857940344 0.021993239558884388 0.0025098589144414048 -0.04709345340627294\n",
      "\n",
      "drink 0.0032802514819924816 -0.010896086307515599 0.03673033734485571 -0.028746827676165033 0.03680994518380114 0.003045187059376818 0.03813135692512778 0.03296263170244926 -0.012358706241238618 0.019415470360585455\n",
      "\n",
      "cup 0.00043681525824840206 0.046101660088899536 -0.03653489095888404 -0.014432926982311028 -0.01994597021446183 -0.0009051885140467188 0.048515371083496385 0.04395129604207541 -0.04416354197298864 0.02002804958309892\n",
      "\n",
      "wine 0.019872836794952324 -0.0174656756176245 -0.01670291388937737 0.025588772570279462 -0.016339247383740277 -0.0479926765740627 0.0435257378076911 -0.021055408405029282 0.026449947154182068 -0.009388331207861913\n",
      "\n",
      "去年天气旧亭台。 0.038276341634669066 -0.003976994730749286 -0.019208510446003562 0.02648336580034416 -0.02596299108014314 -0.023358425344720442 0.036680676853555604 0.01953832390155242 0.013672817230204718 0.025857890958615876\n",
      "\n",
      "In -0.04567395630761488 0.026160992977249588 0.02046690870364453 -0.004677962832757596 -0.019071064497505983 -0.026325758876719483 -0.0361172064248855 0.03541043934626028 0.0187418571501615 0.042342490696661156\n",
      "\n",
      "bower 0.008051407523156355 -0.046172033999736406 -0.0361295178685327 -0.0016858453346113922 -0.0388603350435086 -0.033765457827951324 -0.03921222829608427 0.022726643375937685 -0.005241739005206245 -0.014804294833151288\n",
      "\n",
      "last 0.04027840100627411 -0.015138322052374656 0.008435805346888106 0.009127392648049404 0.02244241582769053 -0.04510629129750983 0.037427556849372365 -0.029506050361447815 -0.016312922641115623 -0.024329844396894392\n",
      "\n",
      "year -0.03373298683316589 -0.02403025438677167 0.0026794738130998456 -0.04404590891957382 -0.007954447650873091 -0.006977019992173868 -0.01906104374362491 0.03778670812047795 0.03136757252027779 -0.032982167896152416\n",
      "\n",
      "when -0.03826602272719093 0.026852194354932222 -0.029344911820863323 0.04167316925295715 0.0012098181228858715 -0.031471894838777784 0.045000391409165265 0.025399283170895418 0.04623766863393463 -0.024816534075985944\n",
      "\n",
      "weather 0.017015768185497746 -0.028548019200290344 -0.00461977368220382 -0.03861329602016518 -0.03371367715888151 0.0172988557370558 0.0006394673423726316 0.009573819891358772 0.038105102918850504 -0.027770331943142358\n",
      "\n",
      "is 0.014972851247959808 -0.02188104761597853 0.011904799503601945 0.036698809584308115 0.0071532008744402105 0.021629055042806697 0.00045338559549100877 -0.015307440945342398 -0.0008923615735893566 -0.04501401543964052\n",
      "\n",
      "as 0.00011817159641086971 0.034951499292532925 0.00762550795918376 -0.0037723430404442743 0.04748121547363533 -0.0300137665640025 0.03268041047413009 -0.018530438094089798 0.008535205025564924 0.04260218426306842\n",
      "\n",
      "fine. 0.026694576895167983 -0.0077218419044533465 0.014602353373895205 -0.007121198216172254 0.024350371072179477 -0.011738592974435346 -0.02521361203675383 0.018151696973137354 -0.044248412628842874 -0.03177700386795555\n",
      "\n",
      "夕阳西下几时回。 0.006835995744685296 0.021119097617145532 0.04044957654976418 -0.03020192232630266 -0.016309486588575797 0.049788437037570195 -0.02664804113016138 -0.035479528564646756 -0.04668393050050119 -0.01399208722591915\n",
      "\n",
      "When 0.04106501134910567 0.009755277719099013 -0.03399814325807269 0.032484411810077725 0.036329424938084956 0.037375897329880746 -0.01784593438641806 -0.02307920474893804 0.03356639396982162 0.036731916876668734\n",
      "\n",
      "will 0.013986959104727761 -0.011270965921998733 0.04326979733281122 0.009497539632149925 -0.00324497715898307 0.03397147393934308 -0.021246518452672544 0.030771576023887866 0.008731403436968063 -0.0023780636487419184\n",
      "\n",
      "you 0.02420051618366048 0.018272655485000348 -0.015541372983680319 -0.03889456482897068 0.011699784229594558 -0.04625258054602732 -0.028401855622284894 -0.03550545646500131 -0.005036871774661344 0.012826951006049293\n",
      "\n",
      "back 0.0021833669038231873 0.027473648098553977 0.049435811976588605 0.020410274379980983 0.04352828468077992 -0.00696349029282768 -0.047387680959081746 0.006720621187156733 -0.0004417087662993705 -0.04340191864001334\n",
      "\n",
      "sun -0.034113693302512085 -0.03379795768411948 -0.030249632510805726 0.002673940550269738 -0.03229202794078602 -0.01147024445920387 -0.02620558926401107 0.016700724178955226 0.02318751825427697 0.009371677890556963\n",
      "\n",
      "decline? -0.022572546606679865 0.046804591108979623 -0.03186275879764331 -0.031011151553649103 0.0020732930609073516 -0.006383957025046726 -0.015204274768835433 0.005016186376939725 -0.03949810311704974 -0.046305435328976435\n",
      "\n",
      "无可奈何花落去， 0.047685420387618055 -0.040738356372989684 0.043007524783718803 -0.015148036768585105 0.040046097426246215 0.028972512394491126 -0.04675314895811463 -0.04619371524694893 -0.004007790131063981 0.03534581587745573\n",
      "\n",
      "Deeply -0.0355642490316818 -0.049128161922544315 0.034372997355289786 0.023737198231585238 -0.04195171570826621 0.02606960326363303 0.015048591564519582 0.0034087785682975244 0.015607000640140966 0.03841909259649309\n",
      "\n",
      "sigh -0.023420238150500056 0.020033565840373906 -0.0056715724963523955 -0.03713518075038255 -0.019383232235851967 0.025146860615007587 -0.019651024777693944 -0.04199137696206826 -0.0176041801065154 -0.038923659286693016\n",
      "\n",
      "for -0.024978325291992037 0.026894431518632574 0.04275267076053619 0.023794834622546657 -0.03144186698875858 0.03047261823280073 0.011988843279599376 -0.004104507948218847 -0.002300984206567219 0.04658298887537452\n",
      "\n",
      "fallen -0.043389871708173516 -0.02614435578122804 0.037611697208407235 -0.0325383436586917 -0.01573187064231301 -0.049141115537746985 -0.03476957021733179 0.04698166483329697 0.013672256088163998 -0.027586794374726893\n",
      "\n",
      "flowers 0.013098929616791698 -0.01295968701899895 -0.04075672170059943 0.016950401007396004 -0.020106131048022807 0.021839364034081692 0.026439345311453453 -0.04690662104476976 0.016339595936149563 0.023644879660049172\n",
      "\n",
      "vain. -0.013207017371879702 0.047528610851756865 -0.029929550078053146 -0.024913797247524957 -0.019892417842214224 -0.038884642680075326 -0.044227965670811976 0.04537928875742793 -0.029578365892776355 0.019033283477406168\n",
      "\n",
      "似曾相识燕归来。 0.0135011219629519 0.03471817476771298 -0.012846416588785407 0.010352325424572553 0.014091094534258086 -0.009077197202555174 0.03474598242458192 0.04154430093969481 0.04992874424326886 -0.0036306287598169766\n",
      "\n",
      "Vaguely 0.0033407026975061146 -0.009953182631061184 0.026405173417548184 0.04649454503415873 -0.04709157188055162 0.028269795056459334 0.0186702661607841 0.0498071268465018 -0.023790389066681242 -0.008406515484910292\n",
      "\n",
      "seem 0.008617683317005288 -0.03874072096199159 -0.036053875013526006 0.016714217680103536 -0.023411682963952057 -0.046464238326853015 -0.009551854285111572 -0.044500657552679235 0.021897858565900104 0.0043666142888589545\n",
      "\n",
      "know -0.013930606486672038 0.023131664222679336 -0.04279927057642272 -0.034877635545194775 -0.020791169327761445 0.0174704694991759 -0.00514860273761275 0.025185476838059575 0.0033695148575010984 -0.0466583278285977\n",
      "\n",
      "swallows -0.033143912744321054 -0.031177292066147725 0.03260340414164773 -0.03576407173658326 0.03444692998101746 0.02831403681371738 -0.02991337292127144 -0.019486055974069605 0.007390109255344559 0.00019199192878051205\n",
      "\n",
      "again. -0.026942969865178988 -0.037136194963558675 -0.019806460903136428 0.018397391984748688 0.0025913802122383093 0.04815455393036087 -0.048432162915551774 0.04738168465616182 0.02758428193245126 0.011994821831226498\n",
      "\n",
      "小园香径独徘徊。 0.04947988112643371 -0.006432909688306454 -0.001529974362398268 -0.002204506160151832 0.046881498388210964 -0.0019203014821263305 0.016487988514122896 0.019952109050866314 0.014579335166517125 0.041631829051273515\n",
      "\n",
      "Loitering 0.039622996317504294 -0.04636364155631596 0.03776153325557452 -0.007865914388400529 -0.03089504689825559 -0.002754625564577331 -0.019845692556949606 -0.006914976420168619 -0.01286380354124479 -0.018944881106298796\n",
      "\n",
      "garden -0.027415948459688422 0.04916563050378632 -0.02449933296071282 0.033230768589726206 0.03257180531982878 -0.010169031127896896 0.012418837474014986 -0.004261799425317938 0.00038082232408798107 -0.00019097018060002094\n",
      "\n",
      "path, -0.04202355002704042 -0.028526930151353665 0.030385765409011428 -0.04205348939527478 -0.02570578220177691 0.00863547374704922 0.026785824055841474 0.012248049090572637 -0.03720967599225294 -0.021607374753740496\n",
      "\n",
      "alone -0.02133692351778435 -0.05001065842480866 0.019268478729001507 -0.04836822154504072 -0.010077083492518206 -0.016788323408382006 -0.04769899684515109 0.04036307459894412 -0.04810450053023348 0.015012239464899485\n",
      "\n",
      "remain. 0.036270682628214354 -0.013853992967101322 0.03430886857750319 0.033283271289331715 0.03232753057827373 0.03961428658604876 -0.03467208577751705 -0.03271043513633321 0.0036538850995884065 -0.015290441285228268\n",
      "\n",
      "二、《蝶恋花.庭院深深深几许》——宋.欧阳修 -0.016763007128094465 0.030088102176161906 0.019666126577473464 0.0028846441610436892 -0.020789294775126057 0.0011420011662272404 -0.0023442419983632106 0.03878007212892356 -0.04096731343252937 0.018422077999616017\n",
      "\n",
      "庭院深深深几许？ 0.03180862765173199 -0.02221366885895626 -0.033514134612577975 -0.019189167559610937 -0.0022061250445110096 0.007089024293489393 0.030662020846601315 -0.0413330092520854 0.04884275829511436 0.007348883350331944\n",
      "\n",
      "Deep, -0.008275267887742691 0.004423692259775577 -0.03909634733472979 -0.04116518518371182 -0.03104544134958311 -0.04715334068783364 -0.03851932244603272 0.018401446333028783 0.03426325873839363 0.037836109536352594\n",
      "\n",
      "deep 0.0040886174042810645 -0.013021260400823251 0.0319273413719889 0.0166703629274797 -0.03096710997569033 -0.013870904869633013 -0.04044137288956706 -0.04483979841976977 -0.019277197083256713 0.029129154346543874\n",
      "\n",
      "courtyard 0.023624786130781002 0.035747758202329595 0.027247409943003065 0.045452003766559015 -0.0003561055699118331 0.003099752323942997 -0.019688284699102186 0.039446078669294934 0.02180143940418066 0.034108839622907124\n",
      "\n",
      "where -0.03316007722581051 0.014814365314521535 -0.017025379563842376 -0.0081327970009452 -0.01638215801582585 -0.02560294773124586 -0.047525421425629495 0.041244174335577775 -0.013356994043874167 0.010293465755078226\n",
      "\n",
      "is, -0.005822590552766783 -0.004920564556145332 -0.04370255477889813 0.028980042295631468 -0.00065028620319152 -0.042027442084909696 0.002719223402983359 -0.04308453437929736 0.015116830600976416 -0.03881071890086567\n",
      "\n",
      "so -0.002730958377911598 0.04168684556164063 -0.006406987067238072 0.004314479673932106 0.030262701547342986 -0.01953264828614472 -0.028314443657577782 0.048293173536536255 -0.006836928275310556 0.01743731249349567\n",
      "\n",
      "deep. 0.04201515352225387 0.04568094381283329 -0.022498762537696393 0.04404714118075735 0.04227407966186443 -0.04052315742504794 -0.01981973260800773 -0.008388514823918784 0.04752711753294183 -0.030536181445603404\n",
      "\n",
      "杨柳堆烟， 0.0311277094354294 -0.006195812823528104 0.0386067692259228 0.035155113605724174 -0.01486823391779096 -0.04529133658965179 0.016247705656690422 -0.012122443671138312 -0.028557742962578386 -0.03806989374174816\n",
      "\n",
      "It's -0.03636289719725442 0.02639153859415659 -0.001839830224498882 0.004048400017020767 -0.022165000992818068 -0.010083794282087121 -0.03167197309290857 -0.020838011397191998 0.04645267912864252 0.03627311419343035\n",
      "\n",
      "veiled 0.042126854604495556 0.019598695282468714 0.03832207837028674 -0.004309478606284785 0.04741905279179614 0.024081458813407834 0.045336073392760266 0.03790333878207433 -0.03871404934707531 -0.03338125448775132\n",
      "\n",
      "by -0.019442642487313158 0.0025120044932570446 0.040885193482011535 0.01899849022930606 -0.002380029319585121 -0.011194595693389792 0.021847815989540803 -0.043077521410061825 0.0023799006251638485 0.00987816949989117\n",
      "\n",
      "smoke -0.04046531526370833 -0.021056930436991524 0.0302704638208789 0.042346888830451346 0.03791649505075167 -0.04849834631596262 0.024744219991316835 -0.010494295838403979 0.017044227531374675 0.02749176948311448\n",
      "\n",
      "willows -0.047654323955156785 -0.017507833825209214 -0.04929052407244195 -0.036322273786703066 0.013946913162655141 -0.026707702718273302 -0.047796598817785245 -0.024038542207539332 0.01683361274901905 0.03388707041631266\n",
      "\n",
      "heap -0.02549873212606159 0.008584182947743636 -0.03864494428988667 0.002818082838529333 0.027289727938008757 0.014969752617162468 0.028061675639573614 -0.025927730867226582 -0.017943942727742676 0.04300970112484328\n",
      "\n",
      "heap. 0.0232737753881086 0.00018351431204506946 -0.03565748536041089 0.015243790890610566 -0.03264088291717124 0.030443719892972072 -0.04928398328682952 -0.029994321335830732 0.013764705063205393 -0.014828523354088118\n",
      "\n",
      "帘幕无重数。 0.028427958972128946 -0.019175740067624757 0.025825140179106424 -0.002731291533665957 0.03335889365313971 -0.021254766362200365 0.0407654275743311 0.03088187181726552 0.022637587690594724 0.0010709346646241354\n",
      "\n",
      "By 0.001000423541920282 0.014447144043904871 0.021071529346839465 -0.04520141265668709 0.010537998417388738 0.01102237614596947 0.0036383975386568104 -0.03765610719654393 0.04009961522129803 -0.028784936061171756\n",
      "\n",
      "screen -0.03158609673981151 0.03411975925935415 -0.004823179209974864 -0.03368611777939006 0.025574063435372545 0.004630874300956571 -0.04792931138948425 0.01854961941629364 -0.020129307084633202 -0.02656198570396953\n",
      "\n",
      "screen. -0.010106266913384147 0.015878819598471178 0.02454141996510556 0.03382991470375164 -0.0227155804763296 -0.027020473116030706 0.03370416868246366 0.033904800875006795 0.026338074973932677 0.03414416942437\n",
      "\n",
      "玉勒雕鞍游冶处， -0.049280189988541535 0.04886728738258247 -0.049049212093325564 0.0359023308080231 0.004966025160451067 0.04465474356640066 0.03956318300606513 -0.007363924991546144 0.020305337241749613 0.03675422833470101\n",
      "\n",
      "Leaving 0.013321599523343448 0.03543384807169295 0.026850012276560475 0.0193764948477397 0.0136767564559329 -0.03131089322454055 0.02215647036409899 0.009791780873137812 0.006130228438659736 0.019847586136813384\n",
      "\n",
      "saddle -0.033774588847629905 0.031137086158572573 -0.03306823271858662 0.01981489575522416 -0.009631066238273138 0.009785325042969832 0.022733146705718896 -0.0076923254694255564 0.04544964978702549 0.04006125081628977\n",
      "\n",
      "bridle, -0.0026778132760380833 -0.024714475534197974 -0.04514976208170342 -0.007483226021775764 0.033925479306313365 0.017789980738199034 0.046484879794402204 -0.04137490970321323 -0.007557822683749403 0.0005256409471444889\n",
      "\n",
      "there -0.031215897636705806 -0.042302766467449744 0.04447918716232656 -0.0025500803360481725 -0.022444508930618584 0.01289649770259344 0.04199166282028833 0.0290415457084583 0.015134264466172212 -0.02049459039071839\n",
      "\n",
      "has 0.017192257129667363 -0.012916112714583025 0.0176636998969791 0.0044116910535014625 -0.006097610220193642 0.029017029585642725 -0.008196795858685615 0.004050401909415125 -0.016063561373561148 -0.019127643582525868\n",
      "\n",
      "been 0.01968316550777385 0.04015970287898102 0.03183201324002787 -0.032271972929613105 -0.009255083543088977 0.01715153755384877 -0.00568771032732015 0.04836797704567547 0.02867418051861236 -0.03055099758597823\n",
      "\n",
      "楼高不见章台路。 0.027544536220978407 -0.011092935964657909 0.0075592719361312274 0.0456509518229923 0.04648310899991713 0.04496364806429454 -0.02283635794992006 -0.001304936255559681 -0.024320707223205878 0.04423989255055295\n",
      "\n",
      "Merry-making, 0.026872254122237815 -0.0010577372553940278 -0.0018543453241288738 -0.02714477742267563 -0.006992640981372158 0.036055922948140626 -0.030953835775391573 0.04661378393367381 -0.04270717239423886 -0.04479081149393093\n",
      "\n",
      "from 0.00022151602400329948 0.02599276130687592 -0.02632165981249664 -0.0009477430601608997 -0.04762145717482853 0.035006073964013444 0.023218616849058092 0.0029402140074095313 0.048492569148829365 -0.0215729441053483\n",
      "\n",
      "my 0.023626137622945582 0.007768034941223221 -0.03951891414122352 -0.028541017105279457 -0.032373109835651115 -0.0476330753402396 -0.007029941347739594 0.02946539315655315 0.0060833980817021575 0.007882112002612096\n",
      "\n",
      "tower 0.049400693852859855 0.012872571798745881 -0.027664828940365337 0.026647196889443774 -0.03987182390631679 0.03774436626580578 -0.003866343007453698 0.010715796551418832 0.04191723415177702 0.0306594140846223\n",
      "\n",
      "trace -0.028791064181226924 0.01464358784055404 0.015802385529720003 0.043449072150816954 -0.03461031008079329 -0.025990609163633566 0.009013563259667288 0.014675251815309627 -0.029299105681792323 0.04247490928251578\n",
      "\n",
      "be -0.022181280195030517 -0.015201561508815551 -0.02066154605242638 0.040789353906385634 -0.03319014601591076 0.040585825036899496 0.00720197950791911 -0.040901964336982816 -0.030407894009421052 0.031904309843302045\n",
      "\n",
      "seen. -0.018245364079080454 0.011374981149254552 -0.02453946581394229 0.024861346384743164 0.005668087112830993 -0.00866899579713909 -0.040290103827653134 0.03446922487000163 0.036459391838537705 -0.002452675282543352\n",
      "\n",
      "雨横风狂三月暮， -0.023383616320712668 0.0488759575230853 -0.017998291637202464 0.01888196214581904 0.023699315036405107 -0.008771938850272139 -0.011130384111023654 -0.03725785378498487 -0.0022905349312458525 0.027577471789118273\n",
      "\n",
      "The 0.04441638570477314 0.006595718930295651 -0.0022578742172733686 0.042796544899427226 -0.0380472158128189 -0.008090363626616017 -0.017460519162487198 -0.004832762918839914 0.033681084632851424 0.01824639118256403\n",
      "\n",
      "third 0.013695077420438035 -0.0032117602886262467 0.011652111210846677 -0.03610799230936593 0.008306235005718277 0.010361415714750317 -0.009582250251758424 -0.03580180869001496 0.011354910682125283 -0.03042335140410235\n",
      "\n",
      "moon 0.030693278683339903 0.01361485406477016 0.009170059439768698 -0.007444098288389158 -0.007365672002588627 0.016103532977244716 -0.004549027585686084 0.04357684656332161 0.047043690178365634 -0.02788360427925965\n",
      "\n",
      "now, 0.003468433208551323 -0.02699754051314988 -6.744893361943912e-05 0.02899540243973366 -0.025817370235107193 0.01660545943952508 0.04031884013067062 0.018411863507932502 0.04358572682540588 0.04696154432195936\n",
      "\n",
      "wind -0.046311738702502325 -0.04958545710311111 -0.006510724947303663 -0.025109021930066557 0.006021132550847655 0.042574191324430924 0.014234395244176365 -0.03416086204786658 -0.048282518942555266 0.012906902856191546\n",
      "\n",
      "rain -0.008384380426649482 -0.01007900342455019 0.01212847605097645 -0.028584699688231748 -0.009856657135349492 -0.028407580554981018 -0.026260434296548802 0.03276085375354812 -0.006783611715859968 0.02410830234870882\n",
      "\n",
      "are 0.042360296854633814 0.01385467276393291 -0.010601811856035804 -0.02029558832602113 -0.038324074362585475 0.03426689642026695 0.017667853775683386 0.008359821290758445 -0.04603997280389022 0.04053509194998998\n",
      "\n",
      "raging -0.010573585311529876 0.013721911616422812 -0.010584302324702857 -0.04180871300170828 0.0363010607843043 -0.04022659235220152 -0.044779669725060124 0.023932030290575213 0.039518746487167224 -0.028683569409928146\n",
      "\n",
      "late; 0.033171265829807485 0.002086961922186738 -0.03452681948236588 -0.04720565433501955 0.020119659979318194 0.04296684898702658 0.021972328641795057 0.0060182771425602395 0.025025403962093875 -0.01831226433746099\n",
      "\n",
      "门掩黄昏， 0.007329514444872885 0.016540574947121196 -0.030438173299424248 0.019036674654836792 0.0012599008522953645 0.0180284295605339 0.029240429603242912 -0.03351978837511508 -0.023311489883972972 0.0012853378439889127\n",
      "\n",
      "At 0.016276916934901884 -0.01734714939328185 -0.025201042906295865 0.010038855267264259 0.039460949731966746 -0.017923261653077876 -0.037503260850619946 -0.04914520693326202 -0.0229450870461667 -0.02185677151184532\n",
      "\n",
      "dusk 0.03626264195127362 0.020205507068187217 -0.0434930872142174 0.04847322323262575 -0.03924623029248967 0.008227129234759718 -0.041541158427632265 -0.021189190795774828 0.029229806737972156 0.01853009043225833\n",
      "\n",
      "gate, 0.013256066754310687 0.021986770431710205 -0.00510238015743674 0.014470432983304785 0.04963080197555422 0.0457502183382591 0.03930856761890027 -0.006014129726419469 -0.022303807229648313 0.016693324065054176\n",
      "\n",
      "无计留春住。 -0.025506402460518603 0.036668120841822124 -0.018022622785233106 -0.033384990675262745 -0.03649846978277067 0.032292309136375 -0.02371263362771426 0.029012608717108326 0.027932953774457363 -0.0016367394076823654\n",
      "\n",
      "But -0.018135827765597724 0.013997896570120459 -0.018636626004889596 -0.04226716772177315 0.0337503364938523 0.009681084103376874 0.02095924817302263 0.005202022135770331 -0.013407161824029838 -0.027103752375457243\n",
      "\n",
      "spring. -0.006771501082281693 -0.017293083813464172 0.006804268801714395 -0.012063300734519429 -0.041196138276695216 0.008127554953086976 0.039527110660827994 -0.025233693390153404 -0.045916528080086456 -0.018663368900225814\n",
      "\n",
      "泪眼问花花不语， 0.046184588781305574 0.01415458057665793 -0.03356461582644591 -0.047790019720830045 -0.0462684918877548 0.029892342466333335 0.03699026590203839 -0.04862584113783396 -0.04744474053680236 -0.025960213486491798\n",
      "\n",
      "My 0.03470030244286013 -0.04972367656503314 -0.04577190673300935 0.040592198311691426 -0.02896869886237033 -0.0009886835525344591 0.023183876850497442 -0.005443606370069297 -0.033738381921243225 -0.03749049781313172\n",
      "\n",
      "tearful -0.024786436964032415 0.033676628346127974 0.03888875971403166 -0.012233473829905134 -0.013877687787893604 0.034161469041414115 -0.03188483549067371 -0.037438186816959665 0.031633539291844835 -0.03802451117047086\n",
      "\n",
      "eyes 0.008201192979587362 -0.016231614367232805 0.013282364885201046 0.036506569502326336 0.0404968425186701 0.030000601354886424 -0.033080460046438984 0.028328717890496773 0.031779820204425324 0.04328226238235509\n",
      "\n",
      "ask 0.020863922746819086 0.012366908437867224 0.03873532401096287 0.0321760461704745 -0.016046961326198015 -0.0277928533060592 0.0069304273046452225 -0.031443410593455456 0.012454575763818043 0.04564682360887574\n",
      "\n",
      "flowers, -0.003934929185893316 -0.04837720898943042 -0.02350958728574502 0.030976972655297016 -0.04814127530069179 0.005493597456692653 0.005726385137593664 -0.028818216945541878 -0.0482597377975537 -0.04910404949034678\n",
      "\n",
      "but -0.029208270317940297 -0.0013785502316847435 -0.031981982569553556 0.02994104612923556 -0.013675296359567773 0.009963963716881682 0.01966874238857635 -0.033557713532370984 -0.029001132522163946 -0.016943711898822347\n",
      "\n",
      "they 0.018161139414237932 -0.017739264408781878 -0.01801916806567929 0.045021878510541784 0.008252052630631647 -0.020325237079110988 -0.022361443805079304 0.04276498564469424 -0.03688041088711725 -0.03852596835367926\n",
      "\n",
      "fail 0.009023007425332201 -0.002641571137003779 -0.040274764622604674 0.018957761759934876 -0.02472081547636898 -0.022803114440919356 0.03953101019910383 0.0418823828041607 0.0013533736541234832 -0.04928057999087383\n",
      "\n",
      "bring -0.007256229590336982 -0.00270033238163188 0.015072724130131379 0.04227426312673846 -0.0036649247982829397 0.039787376037807645 -0.03658880769972144 0.009510537923931028 -0.030266678802416683 -0.04555541168312177\n",
      "\n",
      "乱红飞过秋千去。 -0.044082100393434215 -0.02064526922633789 -0.039012193847367815 0.029096724058791573 -0.03725986117926775 -0.03715607053536804 -0.02443311702328895 -0.045526354188379935 -0.006431956169903559 -0.011478741273907585\n",
      "\n",
      "An -0.04947969664329205 -0.028904062633652732 0.03656109794734635 0.021049387383638575 -0.024549017828560487 0.04835224868317234 0.00036710636497073394 -0.02838937908016252 0.039094473634758 -0.04884607371464403\n",
      "\n",
      "answer. 0.020930320535830042 0.014118415247243487 0.0029605623923589165 -0.029895133272300024 0.03815447404369641 0.0013723629651620464 0.01165209753327209 0.04862451558983651 0.03979156840328191 -0.01784106490778866\n",
      "\n",
      "see -0.0415002109868896 0.005622764710616188 0.008946515966964283 0.03986002819910205 -0.029274659363257724 -0.01275777883777388 -0.02695252593580323 -0.03531007505219473 0.03253484482892326 0.0023088551431896197\n",
      "\n",
      "red 0.008457874001898819 0.030021119222028155 -0.04710588227296974 -0.018460104844916954 0.015536202423749795 -0.0204184808684203 0.009611920887201825 -0.03781553738642745 0.040149739464065436 -0.013664414737342782\n",
      "\n",
      "blooms 0.01793567377428455 0.02885483704031843 0.04884849044102094 -0.02381043639251826 -0.0179498842374239 0.00882851486152556 -0.02039542785165664 -0.04771833201856506 -0.032498608119102365 0.04983037949892182\n",
      "\n",
      "fly 0.04296153451416818 -0.043526132398103795 0.012067461448009837 0.04763951213359968 0.023102993355085723 0.04170122356072064 -0.04207842081008928 -0.01448526914156309 0.030369111689440995 -0.04333899471648736\n",
      "\n",
      "over -0.03319912980544834 0.028057158059014476 -0.006710675756691755 0.017524851884807954 0.021983037364546838 0.026857952389010623 0.0011260218066935607 0.03207727366403893 0.03910876346969156 0.016747510104098386\n",
      "\n",
      "swing. -0.002826402761996792 -0.005179562102303345 0.007111099534501863 -0.03666577179841162 -0.0028155777380301206 0.008270427964916578 -0.014753782992604714 -0.04099982191341226 -0.030583753202179115 0.004018593538650204\n",
      "\n",
      "<unk> -0.0007793550993606818 0.03600938031015498 0.0274629379138316 -0.038957443483001314 -0.019594583531336286 0.017552604667057686 0.02115556579684813 -0.03681763065798909 0.012117456683073048 -0.011981304987254762\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Read back the saved word2vec model file and display its contents.\n",
    "# `with` guarantees the file handle is closed even if iteration fails\n",
    "# (the original leaked the handle by never calling fo.close()).\n",
    "with open(\"testw2v.model\") as model_file:\n",
    "    for line in model_file:\n",
    "        # Each line already ends with '\\n'; end='' avoids the blank\n",
    "        # line after every row that plain print(line) produces.\n",
    "        print(line, end=\"\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
