

import numpy as np  
import torch
import torch.nn as nn 
import torch.nn.functional as F 
   
# ------------------------- vocabulary loading ------------------------- #
# The checkpoint stores the vocabulary as the repr() of a Python dict.
# ast.literal_eval parses literals only, unlike eval() which would execute
# arbitrary code found in the file.  (Assumes word2id.mad is a plain dict
# literal -- TODO confirm against whatever writes the file.)
import ast

with open("ckpt/word2id.mad", "r", encoding="utf-8") as word_dict_file:
    word2id = ast.literal_eval(word_dict_file.read())
# Inverse mapping (id -> word), used when decoding generated token ids.
id2word = {idx: word for word, idx in word2id.items()}


class Model(nn.Module):
    """Character-level GRU language model.

    Maps a batch of token ids to a per-step probability distribution
    over the vocabulary and carries a recurrent hidden state between
    calls.  NOTE(review): the GRU is built without batch_first=True, so
    it treats dim 0 of the embedded input as the sequence axis, while
    forward() unpacks x.shape as (B, T) -- the two conventions only
    coincide for the 1x1 inputs used by the sampler below; confirm
    against the training code.
    """

    def __init__(self, n_word):
        super().__init__()
        self.n_word = n_word    # vocabulary size
        self.n_hidden = 128     # embedding dim == GRU hidden dim
        self.n_layer = 2        # number of stacked GRU layers
        # token id -> dense vector
        self.emb = nn.Embedding(self.n_word, self.n_hidden)
        # recurrent core (sequence-first layout)
        self.rnn = nn.GRU(self.n_hidden, self.n_hidden, self.n_layer)
        # hidden state -> vocabulary logits
        self.out = nn.Linear(self.n_hidden, self.n_word)

    def forward(self, x, h0):
        """Return (probabilities, final hidden state).

        x  : 2-D LongTensor of token ids.
        h0 : initial hidden state, shape (n_layer, batch, n_hidden).
        """
        B, T = x.shape  # enforces a 2-D input; values are otherwise unused
        embedded = self.emb(x)
        rnn_out, h_n = self.rnn(embedded, h0)
        logits = self.out(rnn_out)
        # (T, B, C) -> (B, C, T), then normalize over the class axis
        probs = torch.softmax(logits.permute(1, 2, 0), dim=1)
        return probs, h_n
   
def to_word(weights, vocab=None):
    """Greedy-decode a token id from a weight vector and map it to a word.

    Parameters
    ----------
    weights : array-like; per-token scores/probabilities, flattened to 1-D.
    vocab : optional dict mapping token id -> word; defaults to the
        module-level ``id2word`` table loaded from the checkpoint.

    Returns the word whose weight is largest.

    NOTE: an earlier version drew a weighted random sample via
    ``np.searchsorted`` on the cumulative sum, but that result was
    immediately overwritten by ``np.argmax`` -- the dead sampling code
    has been removed and greedy decoding kept as the effective behavior.
    """
    if vocab is None:
        vocab = id2word
    weights = np.reshape(weights, [-1])
    # int() normalizes np.int64 to a plain int key
    return vocab[int(np.argmax(weights))]
print(len(word2id))

# ---- inference: greedily extend each seed character, up to 15 steps ----
model = Model(len(word2id))
# Load the weights first, then switch to eval mode (conventional PyTorch
# order; eval() disables dropout/batchnorm-style training behavior).
model.load_state_dict(torch.load("ckpt/mad.pt"))
model.eval()

pres = ['男', '女', '爱']
with torch.no_grad():  # generation needs no gradient tracking
    for s in pres:
        # Fresh hidden state per seed: (n_layer=2, batch=1, n_hidden).
        h = torch.zeros([2, 1, model.n_hidden])
        words = [s]
        for _ in range(15):
            # Feed the most recent token as a 1x1 LongTensor of its id.
            w = torch.tensor([[word2id[words[-1]]]], dtype=torch.long)
            p, h = model(w, h)
            # Flatten the probability tensor and greedy-decode a word.
            w = to_word(np.reshape(p.detach().numpy(), [-1]))
            words.append(w)
            if w == "\n":  # newline token ends the sequence early
                break
        print("".join(words))
