import torch
import torch.nn as nn
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence,pad_sequence
import numpy as np

class BiLSTM(nn.Module):
    """Bidirectional LSTM tagger: maps token-id sequences to per-token tag scores."""

    START_TAG = "<START>"
    STOP_TAG = "<STOP>"

    def __init__(self, vocab_size, emb_size, hidden_size, out_size):
        """
        Args:
            vocab_size: number of distinct token ids.
            emb_size: embedding dimension (must equal the LSTM input size).
            hidden_size: LSTM hidden size per direction.
            out_size: number of output tags.
        """
        super(BiLSTM, self).__init__()
        # BUG FIX: was nn.Embedding(vocab_size, hidden_size); the LSTM below
        # expects inputs of size emb_size, so this crashed when emb_size != hidden_size.
        self.embedding = nn.Embedding(vocab_size, emb_size)
        self.bilstm = nn.LSTM(emb_size, hidden_size, batch_first=True, bidirectional=True)
        # Bidirectional output concatenates both directions: 2 * hidden_size.
        self.linear = nn.Linear(2 * hidden_size, out_size)

    def forward(self, sentence_tensor, lengths):
        """Return emission scores, shape [B, L, out_size].

        Args:
            sentence_tensor: LongTensor of token ids, shape [B, L].
            lengths: actual sequence lengths, sorted descending (CPU tensor or list).
        """
        emb = self.embedding(sentence_tensor)  # [B, L, emb_size]
        packed = pack_padded_sequence(emb, lengths, batch_first=True)
        rnn_out, _ = self.bilstm(packed)
        # BUG FIX: pad_packed_sequence needs batch_first=True to match the pack
        # above; otherwise the output is [L, B, 2H] and the [B, L, out_size]
        # contract (relied on by test()'s dim=2 argmax) is broken.
        rnn_out, _ = pad_packed_sequence(rnn_out, batch_first=True)  # [B, L, 2H]
        scores = self.linear(rnn_out)  # [B, L, out_size]
        return scores

    def test(self, sents_tensor, lengths, _):
        """Greedy decode: argmax over tag scores.

        The third parameter is unused; it exists to keep the same interface
        as BiLSTM_CRF.
        """
        logits = self.forward(sents_tensor, lengths)  # [B, L, out_size]
        _, batch_tagids = torch.max(logits, dim=2)
        return batch_tagids


class BiLSTM_CRF(nn.Module):
    """BiLSTM encoder plus a CRF transition matrix with Viterbi decoding."""

    START_TAG = "<START>"
    STOP_TAG = "<STOP>"

    def __init__(self, vocab_size, emb_size, hidden_size, out_size, tag_to_ix):
        """
        Args:
            vocab_size: number of distinct token ids.
            emb_size: embedding dimension.
            hidden_size: LSTM hidden size per direction.
            out_size: number of tags (including START/STOP).
            tag_to_ix: mapping tag name -> tag index; must contain START_TAG and STOP_TAG.
        """
        super(BiLSTM_CRF, self).__init__()
        self.bilstm = BiLSTM(vocab_size, emb_size, hidden_size, out_size)
        self.out_size = out_size
        self.tag_to_ix = tag_to_ix
        # transitions[i, j] = score of transitioning TO tag i FROM tag j.
        self.transitions = nn.Parameter(
            torch.randn(out_size, out_size))
        # Nothing may transition to START; nothing may transition from STOP.
        self.transitions.data[tag_to_ix[BiLSTM_CRF.START_TAG], :] = -10000.
        self.transitions.data[:, tag_to_ix[BiLSTM_CRF.STOP_TAG]] = -10000.

    def forward(self, sent_tensor, lengths):
        """Return emission scores from the BiLSTM encoder.

        BUG FIX: the original computed the emissions but fell off the end,
        so forward() always returned None.
        """
        emission = self.bilstm(sent_tensor, lengths)
        return emission

    def vertibi_(self, feats):
        """Viterbi-decode a single sentence's emission matrix.

        (The name keeps the original's 'vertibi' typo to preserve the interface.)
        BUG FIX: the original was an empty stub whose loops did nothing and
        returned None; this implements the standard max-sum decode matching
        the transitions[to, from] convention set up in __init__.

        Args:
            feats: emission scores for one sentence, shape [L, out_size].

        Returns:
            (path_score, best_path): the score of the best tag sequence and
            the list of tag indices (START/STOP excluded).
        """
        backpointers = []

        # Start fully masked except for the START tag.
        init_vvars = torch.full((1, self.out_size), -10000.)
        init_vvars[0][self.tag_to_ix[BiLSTM_CRF.START_TAG]] = 0.
        forward_var = init_vvars

        for feat in feats:
            bptrs_t = []       # best previous tag for each next_tag at this step
            viterbivars_t = [] # best path score ending in each next_tag (pre-emission)
            for next_tag in range(self.out_size):
                # Emission score is constant over previous tags, so it is
                # added after the max.
                next_tag_var = forward_var + self.transitions[next_tag]
                best_tag_id = int(torch.argmax(next_tag_var))
                bptrs_t.append(best_tag_id)
                viterbivars_t.append(next_tag_var[0][best_tag_id].view(1))
            forward_var = (torch.cat(viterbivars_t) + feat).view(1, -1)
            backpointers.append(bptrs_t)

        # Transition to STOP closes the path.
        terminal_var = forward_var + self.transitions[self.tag_to_ix[BiLSTM_CRF.STOP_TAG]]
        best_tag_id = int(torch.argmax(terminal_var))
        path_score = terminal_var[0][best_tag_id]

        # Walk the backpointers to recover the best path.
        best_path = [best_tag_id]
        for bptrs_t in reversed(backpointers):
            best_tag_id = bptrs_t[best_tag_id]
            best_path.append(best_tag_id)
        # The last popped entry must be START; drop it from the result.
        start = best_path.pop()
        assert start == self.tag_to_ix[BiLSTM_CRF.START_TAG]
        best_path.reverse()
        return path_score, best_path



if __name__ == '__main__':
    # Demo: pad a batch of variable-length sequences and pack it.
    seqs = [torch.Tensor(np.random.randint(5, 10, 5)),
            torch.Tensor(np.random.randint(1, 4, 7)),
            torch.Tensor(np.random.randint(11, 14, 3)),
            torch.Tensor(np.random.randint(1, 4, 2))]

    # BUG FIX: the original passed lengths [7, 5, 3, 2] for a batch whose
    # actual row lengths were [5, 7, 3, 2] — pack_padded_sequence requires
    # lengths that match the batch order AND are sorted descending. Sort the
    # batch by length and derive the lengths from the data instead of
    # hard-coding them.
    seqs.sort(key=len, reverse=True)
    lengths = torch.tensor([len(s) for s in seqs])

    padded = pad_sequence(seqs, batch_first=True, padding_value=-10000)
    print(padded)

    packed = pack_padded_sequence(padded, lengths, batch_first=True)
    print(packed)