# -*- coding: utf-8 -*-

import torch.nn as nn
from transformer.module.multi_head_attention import MultiHeadAttention
from transformer.module.multi_head_attention import MaskedMultiHeadAttention
from transformer.module.residualfeedforward import ResidualFeedForward


class DecoderLayer(nn.Module):
    """Single Transformer decoder layer.

    Sub-layers, in order:
      1. masked (causal) multi-head self-attention over the target sequence,
      2. encoder-decoder (cross) multi-head attention, where target queries
         attend to the encoder output,
      3. position-wise residual feed-forward network.
    """

    def __init__(self, d_model, n_head, dropout=0.1):
        """
        :param d_model: model (hidden) dimension H; must be divisible by n_head
        :param n_head:  number of attention heads
        :param dropout: dropout probability passed to every sub-layer
        :raises ValueError: if d_model is not divisible by n_head
        """
        super(DecoderLayer, self).__init__()
        if d_model % n_head != 0:
            # a silent floor division here would build mis-sized heads
            raise ValueError(
                'd_model (%d) must be divisible by n_head (%d)' % (d_model, n_head))
        self.d_model = d_model
        self.n_head = n_head
        self.dropout = dropout
        # per-head key/value dimension
        single_head_dim = self.d_model // self.n_head

        # masked (causal) multi-head self-attention over the target sequence
        self.masked_multi_head_attention = MaskedMultiHeadAttention(self.n_head,
                                                                    self.d_model,
                                                                    single_head_dim,
                                                                    single_head_dim,
                                                                    dropout=dropout)
        # encoder-decoder (cross) multi-head attention
        self.multi_head_attention = MultiHeadAttention(self.n_head,
                                                       self.d_model,
                                                       single_head_dim,
                                                       single_head_dim,
                                                       dropout=dropout)
        # residual feed-forward
        self.feedforward = ResidualFeedForward(self.d_model, dropout=dropout)

    def forward(self, src_enc, tgt_enc, src_mask=None):
        """
        :param src_enc:  encoder output, B, L, H
        :param tgt_enc:  target-side representation, B, L', H
        :param src_mask: source padding mask for cross-attention, B, 1, 1, L
        :return:         updated target representation, B, L', H
        """
        # 1. masked self-attention over the TARGET sequence.
        #    (Bug fix: the original attended over src_enc and overwrote it,
        #    so cross-attention below received the wrong keys/values instead
        #    of the encoder output.)
        # NOTE: sub-modules are invoked as callables (not .forward) so that
        # nn.Module hooks are honored.
        q = k = v = tgt_enc
        tgt_enc, _ = self.masked_multi_head_attention(q, k, v)

        # 2. cross-attention: target queries attend to the encoder output
        q = tgt_enc
        k = v = src_enc
        tgt_enc, _ = self.multi_head_attention(q, k, v, mask=src_mask)

        # 3. position-wise residual feed-forward
        tgt_enc = self.feedforward(tgt_enc)

        return tgt_enc
