# -*- coding: utf-8 -*-
"""
@Time    : 2023/11/8 10:56
@Author  : itlubber
@Site    : itlubber.art
"""
import numpy as np
from scipy import optimize, special


class FocalLoss:
    """Focal loss for binary classification, usable as a LightGBM custom objective.

    The loss is ``-alpha_t * (1 - p_t)^gamma * log(p_t)`` (Lin et al., 2017),
    which down-weights easy examples via ``gamma`` and re-balances the two
    classes via ``alpha``. ``grad``/``hess`` are derivatives with respect to
    the raw score (log-odds), as LightGBM expects.

    References:
        https://maxhalford.github.io/blog/lightgbm-focal-loss/#benchmarks
        https://github.com/jrzaurin/LightGBM-with-Focal-Loss
        https://arxiv.org/pdf/1708.02002.pdf
        http://www.joca.cn/article/2022/1001-9081/1001-9081-2022-42-7-2256.shtml
    """

    def __init__(self, gamma=2., alpha=None):
        # gamma: focusing parameter (gamma=0 recovers plain log-loss).
        # alpha: positive-class weight in (0, 1), or None for no re-balancing.
        self.alpha = alpha
        self.gamma = gamma

    def at(self, y):
        """Per-sample class weight alpha_t: alpha for positives, 1 - alpha for negatives."""
        if self.alpha is None:
            return np.ones_like(y)  # unweighted: every sample counts equally
        return np.where(y, self.alpha, 1 - self.alpha)

    def pt(self, y, p):
        """Probability assigned to the true class, clipped away from {0, 1} so log() is safe."""
        safe_p = np.clip(p, 1e-15, 1 - 1e-15)
        return np.where(y, safe_p, 1 - safe_p)

    def __call__(self, y_true, y_pred):
        """Element-wise focal loss given labels in {0, 1} and predicted probabilities."""
        weight = self.at(y_true)
        true_prob = self.pt(y_true, y_pred)
        return -weight * (1 - true_prob) ** self.gamma * np.log(true_prob)

    def grad(self, y_true, y_pred):
        """First derivative of the focal loss w.r.t. the raw score (log-odds)."""
        sign = 2 * y_true - 1  # {0, 1} -> {-1, 1}
        weight = self.at(y_true)
        true_prob = self.pt(y_true, y_pred)
        gamma = self.gamma
        return weight * sign * (1 - true_prob) ** gamma * (gamma * true_prob * np.log(true_prob) + true_prob - 1)

    def hess(self, y_true, y_pred):
        """Second derivative of the focal loss w.r.t. the raw score (log-odds)."""
        sign = 2 * y_true - 1  # {0, 1} -> {-1, 1}
        weight = self.at(y_true)
        true_prob = self.pt(y_true, y_pred)
        gamma = self.gamma

        # grad = u(pt) * v(pt); apply the product rule, then the chain rule
        # through pt(z) whose derivative is sign * pt * (1 - pt).
        u = weight * sign * (1 - true_prob) ** gamma
        du = -weight * sign * gamma * (1 - true_prob) ** (gamma - 1)
        v = gamma * true_prob * np.log(true_prob) + true_prob - 1
        dv = gamma * np.log(true_prob) + gamma + 1

        return (du * v + u * dv) * sign * (true_prob * (1 - true_prob))

    def init_score(self, y_true):
        """Constant log-odds that minimizes the total focal loss over y_true.

        Used as LightGBM's initial score so boosting starts from the optimum
        constant prediction rather than from 0.
        """
        result = optimize.minimize_scalar(
            lambda p: self(y_true, p).sum(),
            bounds=(0, 1),
            method='bounded'
        )
        best_p = result.x
        return np.log(best_p / (1 - best_p))  # probability -> log-odds

    def lgb_obj(self, preds, train_data):
        """LightGBM objective hook: return (grad, hess) over raw scores."""
        labels = train_data.get_label()
        probs = special.expit(preds)  # raw scores -> probabilities
        return self.grad(labels, probs), self.hess(labels, probs)

    def lgb_eval(self, preds, train_data):
        """LightGBM eval hook: (name, mean focal loss, is_higher_better=False)."""
        labels = train_data.get_label()
        probs = special.expit(preds)
        return 'focal_loss', self(labels, probs).mean(), False


if __name__ == '__main__':
    # Demo: plot the total focal loss over a constant prediction p for several
    # (alpha, gamma) settings, marking each curve's optimal constant (the
    # sigmoid of init_score) with a dashed vertical line.
    import matplotlib
    import matplotlib.pyplot as plt
    from scipy import special

    fig, ax = plt.subplots(figsize=(10, 7))
    matplotlib.rc('font', size=14)

    np.random.seed(10)
    labels = np.random.randint(2, size=500)  # random 0s and 1s

    probe_ps = np.linspace(5e-2, 1 - 5e-2, 100)
    for alpha in [.1, .5, .9]:
        for gamma in [1, 3]:
            loss_fn = FocalLoss(alpha=alpha, gamma=gamma)
            totals = [loss_fn(labels, prob).sum() for prob in probe_ps]

            line, = ax.plot(probe_ps, totals,
                            label=r'$\alpha$ = %s, $\gamma$ = %s' % (alpha, gamma))
            best_p = special.expit(loss_fn.init_score(labels))
            ax.axvline(best_p, color=line.get_color(), linestyle='--')

    ax.legend()
    ax.grid()
    ax.set_title('Obtained initialization constants')
    ax.set_xlabel(r'$p$')
    ax.set_ylabel('Focal loss value')
    plt.show()
