# Minimal MXNet Gluon demo: a stacked LSTM forward pass, plus a
# bidirectional LSTM encoder with per-sequence valid lengths.
import mxnet.ndarray as nd
import mxnet.gluon as gluon
import mxnet.gluon.nn as nn
import mxnet as mx

mx.random.seed(1)

# Build a 2-layer LSTM with 10 hidden units, batch-major ('NTC') layout.
lstm = nn.Sequential()
lstm.add(gluon.rnn.LSTM(10, 2, layout='NTC'))
lstm.initialize()
print(lstm)

# One batch of 2 identical sequences: batch 2, seq len 3, feature len 3.
seq = [[1, 2, 3], [3, 4, 5], [6, 7, 8]]
data = nd.array([seq, seq])
print(lstm(data))
print(lstm)




class Encoder(nn.Block):
    """Bidirectional LSTM encoder over embedded token id sequences.

    Embeds a (batch, seq_len) int matrix of token ids and runs a
    bidirectional LSTM over it, masking padded positions via per-sequence
    valid lengths.
    """

    def __init__(self, vocab_size, emb_size, hidden_size, **kwargs):
        super(Encoder, self).__init__(**kwargs)
        self.embedding = nn.Embedding(vocab_size, emb_size)
        self.left_cell = gluon.rnn.LSTMCell(hidden_size=hidden_size)
        self.right_cell = gluon.rnn.LSTMCell(hidden_size=hidden_size)
        # Combines the forward and backward cells into one bidirectional cell.
        self.rnn_cell = gluon.rnn.BidirectionalCell(self.left_cell, self.right_cell)

    def forward(self, x, seq_lens_src):
        """Encode a batch of token id sequences.

        Parameters
        ----------
        x : NDArray, shape (batch, seq_len)
            Padded token ids.
        seq_lens_src : NDArray, shape (batch,)
            Valid (unpadded) length of each sequence.

        Returns
        -------
        The (outputs, states) pair produced by ``BidirectionalCell.unroll``.
        """
        # Unroll for the longest valid sequence in the batch instead of the
        # previously hardcoded 15, which only worked for seq_len == 15.
        # NOTE(review): assumes x's time dimension is >= this length — holds
        # when seq_lens_src describes x's own padding.
        length = int(nd.max(seq_lens_src).asscalar())
        x = self.embedding(x)
        ret = self.rnn_cell.unroll(length, x, valid_length=seq_lens_src)
        return ret


def test_net():
    """Smoke-test the Encoder on a zero-filled batch with mixed valid lengths."""
    vocab_size, emb_size, hidden_size = 20, 16, 10
    net = Encoder(vocab_size, emb_size, hidden_size)
    net.initialize()
    # Batch of 3 sequences, padded to length 15.
    batch = nd.zeros((3, 15))
    valid_lens = nd.array([15, 4, 3])
    print(net(batch, valid_lens))