import torch


def binary_entropy(prob: torch.Tensor) -> torch.Tensor:
    """
    Elementwise entropy (in nats) of a Bernoulli distribution.

    :param prob: success probability, shape=[...]
    :return: shape=[...]
    """
    # xlogy defines 0 * log(0) == 0, so prob of exactly 0 or 1 yields entropy 0.
    positive_term = torch.special.xlogy(prob, prob)
    negative_term = torch.special.xlogy(1 - prob, 1 - prob)
    return -(positive_term + negative_term)


def binary_sigmoid_entropy(logit: torch.Tensor) -> torch.Tensor:
    """
    Elementwise entropy (in nats) of Bernoulli(sigmoid(logit)), computed
    directly from the logit.

    The previous implementation materialized prob = sigmoid(logit) and took
    logs of it; for |logit| large enough that sigmoid saturates to exactly
    0.0 or 1.0 the entropy collapsed to exactly 0.  Using the identities
    log(p) = -softplus(-logit) and log(1 - p) = -softplus(logit) keeps
    precision in the tails while giving the same values elsewhere.

    :param logit: shape=[...]
    :return: shape=[...]
    """
    prob = torch.sigmoid(logit)
    # H = -p*log(p) - (1-p)*log(1-p), logs expressed via softplus.
    return (prob * torch.nn.functional.softplus(-logit)
            + (1 - prob) * torch.nn.functional.softplus(logit))


def binary_soft_accuracy(prob: torch.Tensor, label: torch.Tensor):
    """
    Probability mass assigned to the true binary label.

    Float labels are binarized at 0.5; bool/int labels are cast to bool
    (non-zero means positive).

    :param prob: shape=[...]
    :param label: shape=[...], bool/int/float
    :return: shape=[...]
    """
    is_positive = (label > 0.5) if label.dtype.is_floating_point else label.bool()
    return torch.where(is_positive, prob, 1 - prob)


def binary_sigmoid_soft_accuracy(logit: torch.Tensor, label: torch.Tensor):
    """
    Probability mass assigned to the true binary label, from a logit.

    :param logit: shape=[...]
    :param label: shape=[...], bool/int/float
    :return: shape=[...]
    """
    return binary_soft_accuracy(logit.sigmoid(), label)


def binary_cross_entropy(prob: torch.Tensor, label: torch.Tensor) -> torch.Tensor:
    """
    Elementwise binary cross entropy from probabilities.

    :param prob: shape=[...]
    :param label: shape=[...], bool/int/float (cast to prob's dtype)
    :return: shape=[...]
    """
    target = label.to(dtype=prob.dtype)
    return torch.nn.functional.binary_cross_entropy(prob, target, reduction='none')


def binary_sigmoid_cross_entropy(logit: torch.Tensor, label: torch.Tensor) -> torch.Tensor:
    """
    Elementwise binary cross entropy from logits, via
    binary_cross_entropy_with_logits.

    :param logit: shape=[...]
    :param label: shape=[...], bool/int/float (cast to logit's dtype)
    :return: shape=[...]
    """
    target = label.to(dtype=logit.dtype)
    return torch.nn.functional.binary_cross_entropy_with_logits(
        logit, target, reduction='none')


def categorical_entropy(probs: torch.Tensor, dim: int) -> torch.Tensor:
    """
    Entropy (in nats) of a categorical distribution along `dim`.

    :param probs: shape=[..., categories_n, ...]
    :param dim: index of the category axis in `probs`
    :return: shape=[..., ...]
    """
    # torch.special.xlogy (same alias the rest of this module uses) defines
    # 0 * log(0) == 0, so zero-probability categories contribute nothing.
    return -torch.sum(torch.special.xlogy(probs, probs), dim=dim)


def categorical_softmax_entropy(logits: torch.Tensor, dim: int) -> torch.Tensor:
    """
    Entropy (in nats) of the softmax distribution along `dim`, from logits.

    Bug fix: the previous implementation called
    `xlogy(probs, log_probs)`, which evaluates `probs * log(log_probs)` —
    log of a (negative) log-probability, i.e. NaN for every category with
    prob < 1.  The intended quantity is simply `probs * log_probs`.

    :param logits: shape=[..., categories_n, ...]
    :param dim: index of the category axis in `logits`
    :return: shape=[..., ...]
    """
    probs = torch.softmax(logits, dim=dim)
    # log_softmax of finite logits is finite, so there is no 0 * -inf here;
    # a prob that underflows to 0 simply zeroes its term.
    log_probs = torch.log_softmax(logits, dim=dim)
    return -torch.sum(probs * log_probs, dim=dim)


def categorical_soft_accuracy(probs: torch.Tensor, label: torch.Tensor, dim: int) -> torch.Tensor:
    """
    Probability mass assigned to the true class along `dim`.

    :param probs: shape=[..., categories_n, ...]
    :param label: shape=[..., ...], int64 class indices along `dim`
    :param dim: index of the category axis in `probs`
    :return: shape=[..., ...]
    """
    index = label.unsqueeze(dim)
    picked = torch.take_along_dim(probs, index, dim=dim)
    return picked.squeeze(dim)


def categorical_softmax_soft_accuracy(logits: torch.Tensor, label: torch.Tensor, dim: int) -> torch.Tensor:
    """
    Probability mass the softmax distribution assigns to the true class.

    :param logits: shape=[..., categories_n, ...]
    :param label: shape=[..., ...], int64 class indices along `dim`
    :param dim: index of the category axis in `logits`
    :return: shape=[..., ...]
    """
    return categorical_soft_accuracy(logits.softmax(dim=dim), label, dim)


def categorical_cross_entropy(
    probs: torch.Tensor,
    label: torch.Tensor,
    dim: int, *,
    eps: torch.Tensor | float | None = None,
) -> torch.Tensor:
    """
    :param probs: shape=[..., categories_n, ...]
    :param label: shape=[..., ...], int64
    :return: shape=[..., ...]
    """
    eps = epsilon(probs.dtype, probs.device) if eps is None else eps
    log_probs = torch.log(probs + eps)
    # [1, categories_n, ...]

    log_probs = torch.moveaxis(log_probs, dim, 0)
    # [categories_n, ...]

    log_probs = torch.unsqueeze(log_probs, 0)
    # [1, categories_n, ...]

    label = torch.unsqueeze(label, 0)
    # [1, ...]

    cross_entropy = torch.nn.functional.nll_loss(log_probs, label, reduction='none')
    # [1, ...]

    cross_entropy = torch.squeeze(cross_entropy, 0)
    # [...]

    return cross_entropy


def categorical_softmax_cross_entropy(
    logits: torch.Tensor,
    label: torch.Tensor,
    dim: int,
) -> torch.Tensor:
    """
    Softmax cross entropy along `dim`, computed from logits.

    :param logits: shape=[..., categories_n, ...]
    :param label: shape=[..., ...], int64 class indices along `dim`
    :param dim: index of the category axis in `logits`
    :return: shape=[..., ...]
    """
    # F.cross_entropy expects the class axis at position 1 of a batched
    # tensor, so move it to the front and prepend a dummy batch axis.
    batched = logits.moveaxis(dim, 0).unsqueeze(0)
    # [1, categories_n, ...]

    targets = label.unsqueeze(0)
    # [1, ...]

    loss = torch.nn.functional.cross_entropy(batched, targets, reduction='none')
    # [1, ...]

    return loss.squeeze(0)


def epsilon(dtype: torch.dtype, device: torch.device | None = None) -> torch.Tensor:
    if dtype in (torch.float64, torch.float32, torch.bfloat16):
        return torch.asarray(1e-10, dtype=dtype, device=device)
    if dtype in (torch.float16,):
        return torch.asarray(1e-7, dtype=dtype, device=device)
    raise ValueError(f'Unsupported dtype: {dtype}')
