import torch.nn as nn
from modules.encoders.rnn_encoder import RNNEncoder
from typing import List


class ParallelRnnEncoder(nn.Module):
    """Run several RNN encoders side by side over a list of sources.

    All encoders must share the same ``hidden_size`` (validated in
    ``__init__``) so downstream code can treat the parallel outputs
    uniformly.

    Args:
        encoders_list: non-empty list of ``RNNEncoder`` instances.

    Forward inputs:
        src_list: n tensors of shape ``[batch, max_src_len]``.
        lengths_list: n tensors of shape ``[batch]``, or ``None``.

    Forward outputs:
        states: n final encoder states
            - LSTM: tuple of two ``[num_layers * directions, batch, hidden]``
            - GRU/RNN: ``[num_layers * directions, batch, hidden]``
        memory_banks: n tensors ``[batch, max_src_len, directions * hidden]``.
        lengths_list: the input ``lengths_list``, echoed back unchanged.
    """

    def __init__(self,
                 encoders_list: List["RNNEncoder"]):
        super(ParallelRnnEncoder, self).__init__()

        # Raise (rather than assert) so validation survives `python -O`
        # and an empty list fails with a clear message instead of IndexError.
        if not encoders_list:
            raise ValueError("encoders_list must contain at least one encoder")
        encoders_hidden_size = [each.hidden_size for each in encoders_list]
        if len(set(encoders_hidden_size)) != 1:
            raise ValueError(
                "all encoders must share the same hidden_size, "
                "got %s" % encoders_hidden_size)
        self.total_hidden = sum(encoders_hidden_size)
        self.single_hidden = encoders_hidden_size[0]
        self.hidden_size = encoders_hidden_size[0]
        # ModuleList registers the sub-encoders so their parameters are
        # visible to .parameters(), .to(), state_dict(), etc.
        self.encoders = nn.ModuleList(encoders_list)
        self.num_encoders = len(encoders_hidden_size)

    def forward(self, src_list, lengths_list=None):
        """Encode each source with its paired encoder.

        Bug fix: the original indexed ``lengths_list[idx]`` unconditionally,
        so leaving ``lengths_list`` at its ``None`` default raised
        ``TypeError``; a per-encoder ``None`` is now forwarded instead.
        """
        memory_banks = []
        states = []
        for idx, encoder in enumerate(self.encoders):
            lengths = None if lengths_list is None else lengths_list[idx]
            state, memory_bank, _ = encoder(src_list[idx], lengths)
            memory_banks.append(memory_bank)
            states.append(state)

        return states, memory_banks, lengths_list

    @classmethod
    def from_opt(cls, opt, embedding=None):
        """Alternate constructor from an options namespace.

        Not implemented yet; raising makes the stub fail loudly instead
        of silently returning ``None``.
        """
        raise NotImplementedError(
            "ParallelRnnEncoder.from_opt is not implemented")


if __name__ == '__main__':
    import torch

    # Shared toy embedding: vocab size 12, dim 7, padding index 0.
    t_embedding = torch.nn.Embedding(12, 7, 0)

    def _toy_batch():
        # Four right-padded sequences of decreasing length (5, 4, 3, 2).
        return torch.tensor([
            [1, 2, 3, 4, 5],
            [2, 3, 4, 5, 0],
            [3, 4, 5, 0, 0],
            [4, 5, 0, 0, 0],
        ])

    def _make_encoder():
        # Two identically-configured unidirectional GRU encoders.
        return RNNEncoder(
            rnn_type="GRU",
            bidirectional=False,
            num_layers=2,
            hidden_size=32,
            dropout=0.0,
            embedding=t_embedding,
        )

    t_src_list = [_toy_batch(), _toy_batch()]
    t_src_len = [torch.tensor([5, 4, 3, 2]), torch.tensor([5, 4, 3, 2])]
    rnn_encoders = [_make_encoder(), _make_encoder()]

    pe = ParallelRnnEncoder(encoders_list=rnn_encoders)
    d1, d2, d3 = pe(t_src_list, t_src_len)
    print(d3)
