import numpy as np

# Activation functions
# TODO: add mish?
def relu(x):
    """Rectified linear unit: elementwise max(x, 0)."""
    return np.clip(x, 0, None)

def sigmoid(x):
    """Numerically stable logistic sigmoid, 1 / (1 + exp(-x)).

    The naive form calls np.exp(-x), which overflows (RuntimeWarning)
    for large negative x. Here np.exp only ever sees -|x| <= 0, using
    the identity sigmoid(x) = exp(x) / (1 + exp(x)) for x < 0.
    Values are mathematically identical to the naive form.

    Note: for scalar input this returns a 0-d ndarray (np.where) rather
    than a numpy scalar; arithmetic behavior is unchanged.
    """
    z = np.exp(-np.abs(x))
    return np.where(np.asarray(x) >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

def tanh(x):
    """Hyperbolic tangent.

    Delegates to np.tanh, which is numerically stable. The explicit
    (e^x - e^-x) / (e^x + e^-x) form overflows once |x| exceeds ~710,
    yielding inf/inf = nan instead of the correct +/-1.
    """
    return np.tanh(x)

def softmax(x):
    """Softmax over the last axis.

    Shifts by the rowwise max before exponentiating (the standard
    stability trick); the shift cancels in the ratio, so values are
    unchanged but np.exp never overflows.
    """
    shifted = x - np.max(x, axis=-1, keepdims=True)
    weights = np.exp(shifted)
    total = weights.sum(axis=-1, keepdims=True)
    return weights / total

def gelu(x):
    """GELU activation, tanh approximation (Hendrycks & Gimpel).

    0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
    """
    scale = np.sqrt(2.0 / np.pi)
    inner = scale * (x + 0.044715 * x ** 3)
    return 0.5 * x * (1.0 + np.tanh(inner))

def swish(x, beta=1.0):
    """Swish / SiLU activation: x * sigmoid(beta * x).

    The sigmoid is written inline, so the whole expression is
    x / (1 + exp(-beta * x)) — algebraically identical.
    """
    return x / (1.0 + np.exp(-beta * x))

def swiglu(x, w1, w2, b1, b2, beta=1.0):
    """SwiGLU gated linear unit: swish(x @ w1 + b1) * (x @ w2 + b2).

    The swish gate is inlined as h / (1 + exp(-beta * h)), which is
    identical to h * sigmoid(beta * h).
    """
    gate_pre = np.dot(x, w1) + b1
    gate = gate_pre / (1.0 + np.exp(-beta * gate_pre))
    value = np.dot(x, w2) + b2
    return gate * value

# Loss functions
# TODO: add focal loss?
def mse(y_true, y_pred):
    """Mean squared error, normalized by the leading (batch) dimension.

    Bug fix: the original returned the elementwise squared-error array
    divided by the batch size — it never reduced to a scalar loss.
    This sums the squared errors before dividing, matching the
    sum-then-divide-by-batch convention used by cross_entropy.
    """
    return np.sum((y_true - y_pred) ** 2) / y_true.shape[0]

def cross_entropy(y_true, y_pred):
    """Cross-entropy between targets and raw logits, averaged over batch.

    y_pred are unnormalized logits; y_true are one-hot (or soft) targets.

    Computes log-softmax directly as
        x - max(x) - log(sum(exp(x - max(x))))
    instead of log(softmax(x)): when a softmax probability underflows to
    exactly zero (very negative relative logit), the original produced
    log(0) = -inf; the log-sum-exp form stays finite.
    """
    shifted = y_pred - np.max(y_pred, axis=-1, keepdims=True)
    log_probs = shifted - np.log(np.sum(np.exp(shifted), axis=-1, keepdims=True))
    return -np.sum(y_true * log_probs) / y_pred.shape[0]

def NLLloss(y_true, y_pred):
    """Negative log-likelihood: -sum(y_true * log(y_pred)).

    Expects y_pred to already be probabilities (e.g. a softmax output).
    Unlike cross_entropy above, the result is a plain sum, not divided
    by the batch size.
    """
    log_likelihood = y_true * np.log(y_pred)
    return -log_likelihood.sum()

def kl(y_true, y_pred):
    """KL divergence D(y_true || y_pred) between probability vectors.

    Bug fix: uses the standard convention 0 * log(0/q) = 0. The naive
    form evaluates log(0) for zero entries of y_true, producing
    0 * -inf = nan plus runtime warnings; terms with y_true == 0 are
    skipped instead. Behavior is unchanged for strictly positive y_true.
    """
    p = np.asarray(y_true, dtype=float)
    q = np.asarray(y_pred, dtype=float)
    mask = p > 0
    return np.sum(p[mask] * np.log(p[mask] / q[mask]))


