# -*- coding:utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F


class BiLSTMMatcher(nn.Module):
    """Siamese bidirectional-LSTM matcher.

    Encodes a query and a candidate token sequence with one shared BiLSTM
    and scores the pair with cosine similarity of their pooled states.
    """

    def __init__(self, emb_vectors, hidden_size, n_layers=1, dropout_p=0.0):
        """
        Args:
            emb_vectors: pretrained embedding matrix of shape
                (vocab_size, emb_size); frozen via ``from_pretrained``.
            hidden_size: total BiLSTM output size (both directions
                concatenated); must be even.
            n_layers: number of stacked LSTM layers.
            dropout_p: dropout applied to embeddings and pooled outputs.

        Raises:
            ValueError: if ``hidden_size`` is odd.
        """
        super(BiLSTMMatcher, self).__init__()
        # Validate with an exception, not `assert` (asserts vanish under -O).
        if hidden_size % 2 != 0:
            raise ValueError("hidden_size must be even for a bidirectional LSTM")
        self.vocab_size = emb_vectors.size(0)
        self.emb_size = emb_vectors.size(1)
        # Each direction produces hidden_size // 2 features, so the
        # concatenated bidirectional output is exactly hidden_size wide.
        self.hidden_size = hidden_size // 2
        self.bidirectional = True

        self.embeddings = nn.Embedding.from_pretrained(emb_vectors)

        self.lstm = nn.LSTM(self.emb_size, self.hidden_size,
                            num_layers=n_layers,
                            batch_first=True,
                            bidirectional=True)
        self.dropout = nn.Dropout(p=dropout_p)

    def forward(self, query_inputs, candidate_inputs, **kwargs):
        """Score query/candidate pairs.

        Args:
            query_inputs: LongTensor of token ids, shape (batch, query_len).
            candidate_inputs: LongTensor of token ids, shape (batch, cand_len).

        Returns:
            Tensor of shape (batch,) with cosine similarities in [-1, 1].
        """
        # Call the modules (self.lstm(...)) rather than .forward(...) so
        # nn.Module hooks are honored.
        query_emb_inputs = self.dropout(self.embeddings(query_inputs))
        query_outputs, _ = self.lstm(query_emb_inputs)

        candidate_emb_inputs = self.dropout(self.embeddings(candidate_inputs))
        candidate_outputs, _ = self.lstm(candidate_emb_inputs)

        # Pool by taking the last timestep of the concatenated outputs.
        # NOTE(review): for the backward direction, the last timestep has
        # only seen the final token, and padding positions (if any) are
        # included — consider masked pooling or the final hidden state
        # per direction; confirm with training results before changing.
        query_output = self.dropout(query_outputs[:, -1, :])
        candidate_output = self.dropout(candidate_outputs[:, -1, :])

        similarity = F.cosine_similarity(query_output, candidate_output, dim=1)
        return similarity


if __name__ == '__main__':
    # Smoke-test the matcher on a toy 100-word, 64-dim embedding table.
    embs = torch.randn(100, 64)
    matcher = BiLSTMMatcher(embs, 64)

    input1 = torch.LongTensor([[1, 3, 4, 0], [20, 30, 50, 60]])
    input2 = torch.LongTensor([[1, 30, 40, 10], [2, 3, 5, 0]])
    # Demonstrate padding masking: token id 0 is treated as padding here.
    mask = torch.ne(input1, 0)
    mask_output = (input1 + 3) * mask
    print(mask)
    print(mask_output)

    # Run the matcher end to end and show the per-pair similarities.
    similarity = matcher(input1, input2)
    print(similarity)
