# @Author : bamtercelboo
# @Datetime : 2018/07/19 22:35
# @File : model_BiLSTM.py
# @Last Modify Time : 2018/07/19 22:35
# @Contact : bamtercelboo@{gmail.com, 163.com}

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
import random
from DataUtils.Common import seed_num
from self_attention import SelfAttentionDot
# Seed both torch and the stdlib RNG at import time so that weight
# initialization and any python-level shuffling are reproducible across runs.
torch.manual_seed(seed_num)
random.seed(seed_num)

"""
Neural Networks model : Bidirection LSTM
"""


class BiLSTM(nn.Module):
    """Bidirectional LSTM text classifier with dot-product self-attention.

    Pipeline: token ids -> embedding -> BiLSTM (batch_first) -> tanh ->
    self-attention -> tanh -> two linear layers -> softmax probabilities.
    """

    def __init__(self, word_length, embed_dim=256, hidden_dim=64,
                 num_classes=2, seq_len=20):
        """Build the model.

        Args:
            word_length: vocabulary size (number of embedding rows).
            embed_dim: embedding dimension (default 256, the original
                hard-coded value).
            hidden_dim: total BiLSTM feature size; each direction gets
                hidden_dim // 2 units so the concatenated output is
                exactly hidden_dim wide (default 64).
            num_classes: number of output classes (default 2).
            seq_len: fixed sequence length used to reshape the embedded
                input in forward() (default 20, the original constant).
        """
        super(BiLSTM, self).__init__()
        self.hidden_dim = hidden_dim
        self.num_layers = 1
        self.embed_dim = embed_dim
        self.seq_len = seq_len
        self.embed = nn.Embedding(word_length, embed_dim)
        # Halve the hidden size per direction so that the bidirectional
        # concatenation matches hidden_dim for the layers below.
        self.bilstm = nn.LSTM(embed_dim, hidden_dim // 2,
                              num_layers=self.num_layers,
                              bidirectional=True, bias=False,
                              batch_first=True)
        self.hidden2label1 = nn.Linear(hidden_dim, hidden_dim // 2)
        self.hidden2label2 = nn.Linear(hidden_dim // 2, num_classes)
        self.self_attention = SelfAttentionDot(hidden_dim, hidden_dim)

    def forward(self, x):
        """Run the classifier.

        Args:
            x: LongTensor of token ids; reshaped so the LSTM sees
                (batch, seq_len, embed_dim) -- assumes the total number
                of tokens is a multiple of seq_len. TODO confirm callers
                always supply seq_len tokens per example.

        Returns:
            Tensor of class probabilities (softmax over the last dim).
        """
        embed = self.embed(x)
        lstm_in = embed.view(-1, self.seq_len, self.embed_dim)
        bilstm_out, _ = self.bilstm(lstm_in)
        bilstm_out = torch.tanh(bilstm_out)
        bilstm_out = self.self_attention(bilstm_out)
        bilstm_out = torch.tanh(bilstm_out)
        y = self.hidden2label1(bilstm_out)
        y = self.hidden2label2(y)
        # NOTE(review): returns softmax probabilities. If this model is
        # trained with nn.CrossEntropyLoss, raw logits (no softmax) would
        # be expected instead -- confirm against the training loop.
        logit = F.softmax(y, dim=-1)
        return logit

if __name__ == '__main__':
    # Smoke check: build a tiny model and show its layer summary.
    model = BiLSTM(10)
    print(model)