"""
连接主义时间分类损失 (Connectionist Temporal Classification loss)。
计算连续（未分段）时间序列与目标序列之间的损失。CTCLoss 对输入与目标序列所有可能的对齐方式的概率求和，
从而生成一个相对于每个输入节点都可微分的损失值。输入与目标的对齐被假定为“多对一”关系，
这将目标序列的长度限制为必须 ≤ 输入长度。

参数
blank (int, optional) – 空格标签。默认值 0
reduction (str, optional) – 指定要应用于输出的归约方式：'none' | 'mean' | 'sum'。'none'：不应用任何归约；'mean'：输出损失将除以目标长度，然后取批次平均值；'sum'：输出损失将被求和。默认值：'mean'
zero_infinity (bool, optional) – 是否将无限损失及其相关梯度清零。当输入序列过短、无法与目标序列对齐时，主要会出现无限损失。默认值：False
"""
import torch
from torch import nn

# Demo: CTCLoss with targets padded to a fixed length S.
# Per-sample true lengths are given separately via `target_lengths`.
T = 50  # Input (time) sequence length
C = 20  # Number of classes including blank (i.e. the vocab size)
N = 16  # Batch size
S = 30  # Padded target length (length of the longest target in the batch)
S_min = 10  # Minimum target length, for demonstration purposes

# Random batch of log-probabilities, shape (T, N, C).
# Renamed from `input` to avoid shadowing the builtin of the same name.
log_probs = torch.randn(T, N, C).log_softmax(2).detach().requires_grad_()
# Random targets; class 0 is reserved for blank, so labels are drawn from 1..C-1.
target = torch.randint(low=1, high=C, size=(N, S), dtype=torch.long)
# Every input sequence uses the full T time steps.
input_lengths = torch.full(size=(N,), fill_value=T, dtype=torch.long)
# True target lengths in [S_min, S); entries of `target` beyond these are ignored padding.
target_lengths = torch.randint(low=S_min, high=S, size=(N,), dtype=torch.long)
ctc_loss = nn.CTCLoss()  # defaults: blank=0, reduction='mean', zero_infinity=False
loss = ctc_loss(log_probs, target, input_lengths, target_lengths)
loss.backward()  # gradient flows back into log_probs
# ======================================================================
# # Target are to be un-padded
# T = 50  # Input sequence length
# C = 20  # Number of classes (including blank)
# N = 16  # Batch size
# # Initialize random batch of input vectors, for *size = (T,N,C)
# input = torch.randn(T, N, C).log_softmax(2).detach().requires_grad_()
# input_lengths = torch.full(size=(N,), fill_value=T, dtype=torch.long)
# # Initialize random batch of targets (0 = blank, 1:C = classes)
# target_lengths = torch.randint(low=1, high=T, size=(N,), dtype=torch.long)
# target = torch.randint(low=1, high=C, size=(sum(target_lengths),), dtype=torch.long)
# ctc_loss = nn.CTCLoss()
# loss = ctc_loss(input, target, input_lengths, target_lengths)
# loss.backward()
# ===============================================================================
# # Target are to be un-padded and unbatched (effectively N=1)
# T = 50  # Input sequence length
# C = 20  # Number of classes (including blank)
# # Initialize random batch of input vectors, for *size = (T,C)
# input = torch.randn(T, C).log_softmax(1).detach().requires_grad_()
# input_lengths = torch.tensor(T, dtype=torch.long)
# # Initialize random batch of targets (0 = blank, 1:C = classes)
# target_lengths = torch.randint(low=1, high=T, size=(), dtype=torch.long)
# target = torch.randint(low=1, high=C, size=(target_lengths,), dtype=torch.long)
# ctc_loss = nn.CTCLoss()
# loss = ctc_loss(input, target, input_lengths, target_lengths)
# loss.backward()
