# -*- coding: utf-8 -*-

import torch


def shift(tensor, sos_id):
    """Right-shift each sequence one step, inserting `sos_id` at position 0.

    The last token of every row is dropped, so the output keeps the input
    shape [B, T]. Typical use: build decoder inputs from target sequences.

    :param tensor: torch.LongTensor [B, T]
    :param sos_id: int, start-of-sequence token id
    :return: torch.LongTensor [B, T]
    """
    sos_col = torch.full((tensor.size(0), 1), sos_id,
                         dtype=torch.long, device=tensor.device)
    return torch.cat((sos_col, tensor[:, :-1]), dim=-1)


def len2mask(sent_len, max_len):
    """Build a binary padding mask from sentence lengths.

    Position i of row b is 1 iff i < sent_len[b], else 0.

    :param sent_len: torch.LongTensor [B, ]
    :param max_len: long (abbreviated L)          |B=batch_size, L=seq_len, H=hidden_dim, E=emb_dim|
    :return: sent_mask: torch.LongTensor [B, L]
    """
    # [L] positions 0, 1, ..., max_len - 1, on the same device as the lengths
    index = torch.arange(max_len, device=sent_len.device)
    # Broadcast [1, L] against [B, 1] -> [B, L] bool; no explicit expand needed.
    # .long() converts in place of the torch.tensor(tensor) copy-construction
    # antipattern, which raises a UserWarning in modern PyTorch.
    sent_mask = (index.unsqueeze(0) < sent_len.unsqueeze(1)).long()
    return sent_mask  # [B, L]


if __name__ == '__main__':
    # Quick smoke check: two sentences of lengths 5 and 3, padded to 9 positions.
    demo_lengths = torch.tensor([5, 3])
    demo_max_len = 9
    demo_mask = len2mask(demo_lengths, demo_max_len)
    print(demo_mask)
