# -*- coding: utf-8 -*-
# @Time    : 2020/12/9 11:58 PM
# @Author  : lilong

import torch


class LM_Mask(object):
    """Lower-triangular (causal) attention mask mixin for language models.

    Expects the mixing class to provide ``self.attention_mask`` (cache slot),
    ``self.inputs`` (model input tensors) and ``self.apply`` (layer wiring
    helper) — all defined elsewhere in the project.
    """

    def compute_attention_mask(self, inputs=None):
        """Build and cache the causal attention mask.

        The mask is derived from the first model input: position i may only
        attend to positions <= i. The result is cached on
        ``self.attention_mask`` so repeated calls are free.

        Args:
            inputs: unused; kept for interface compatibility with callers.

        Returns:
            The cached mask layer output, shaped ``(1, 1, seq_len, seq_len)``.
        """
        if self.attention_mask is None:
            def lm_mask(s):
                # s is assumed to be a (batch, seq_len, ...) tensor — seq_len
                # is read from dim 1. TODO confirm against caller.
                seq_len = s.shape[1]
                # Allocate on the same device as s: without device=, the mask
                # would land on CPU and mismatch CUDA inputs downstream.
                ones = torch.ones(seq_len, seq_len, device=s.device)
                mask = torch.tril(ones)  # keep main diagonal and below
                # Expand 2-D (seq, seq) to 4-D (1, 1, seq, seq) for
                # broadcasting over batch and head dimensions.
                return mask[None, None]

            self.attention_mask = self.apply(
                inputs=self.inputs[0],
                layer=Lambda,
                function=lm_mask,
                name='Attention-LM-Mask'
            )

        return self.attention_mask
