# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch


# TODO maybe push this to nn?
# def smooth_l1_loss(input, target, beta=1. / 9, size_average=True):
#     """
#     very similar to the smooth_l1_loss from pytorch, but with
#     the extra beta parameter
#     """
#     n = torch.abs(input - target)
#     cond = n < beta
#     loss = torch.where(cond, 0.5 * n ** 2 / beta, n - 0.5 * beta)
#     if size_average:
#         return loss.mean()
#     return loss.sum()

def smooth_l1_loss(input, target, beta=1.0 / 9, size_average=True, reduction=None):
    """Smooth-L1 (Huber-like) loss with a configurable ``beta`` transition point.

    Quadratic (``0.5 * n**2 / beta``) where ``|input - target| < beta``,
    linear (``n - 0.5 * beta``) elsewhere.

    Args:
        input (Tensor): predictions.
        target (Tensor): ground-truth values, broadcastable with ``input``.
        beta (float): transition point between the quadratic and linear
            regimes. Values below 1e-5 degrade to a plain L1 loss, which
            avoids the division by (near-)zero entirely.
        size_average (bool): legacy flag, consulted only when ``reduction``
            is None: True -> mean, False -> sum.
        reduction (str | None): "none", "sum", or "mean". When given, it
            takes precedence over ``size_average``.

    Returns:
        Tensor: scalar loss, or the elementwise loss when
        ``reduction == "none"``.
    """
    n = torch.abs(input - target)
    if beta < 1e-5:
        # Degenerate to L1: avoids dividing by ~0 in the quadratic branch.
        loss = n
    else:
        cond = n < beta
        loss = torch.where(cond, 0.5 * n ** 2 / beta, n - 0.5 * beta)

    if reduction == "none":
        return loss
    if reduction == "sum":
        return loss.sum()
    if reduction == "mean":
        return loss.mean()
    # reduction is None: honor the legacy size_average flag.
    # (The previous code compared `reduction == "none"` here, which never
    # matched the default None, so size_average was silently ignored.)
    return loss.mean() if size_average else loss.sum()