import numpy as np


# Activation functions
###
def relu(val):
    """Element-wise rectified linear unit: max(val, 0)."""
    return np.clip(val, 0, None)


def sigmoid(val):
    """Element-wise logistic sigmoid: 1 / (1 + e^(-val))."""
    exp_neg = np.exp(-val)
    return 1.0 / (1.0 + exp_neg)


def softmax(val):
    """Softmax over all elements of ``val``.

    Subtracts the global max before exponentiating for numerical
    stability.  Fix: shift a copy instead of mutating the caller's
    array — the original ``val -= np.max(val)`` clobbered the input
    in place.

    NOTE(review): normalization is over the whole array, so a 2-D
    batch is NOT handled row-wise — confirm callers pass 1-D vectors.
    """
    shifted = val - np.max(val)
    exp_val = np.exp(shifted)
    return exp_val / np.sum(exp_val)
###


# Cost functions
###
def mean_squared_cost(y_true, y_pred):
    """Half mean squared error: (1 / 2n) * sum((y_pred - y_true)^2),
    where n is the number of samples (first axis of y_true)."""
    samples = y_true.shape[0]
    residual = y_pred - y_true
    return 0.5 / samples * np.sum(residual ** 2)


def cross_entropy_cost(y_true, y_pred):
    """Mean cross-entropy: -(1/n) * sum(y_true * log(y_pred + eps)).

    The small epsilon keeps log() away from zero-probability entries.
    """
    samples = y_true.shape[0]
    eps = 1e-7
    return -np.sum(np.log(y_pred + eps) * y_true) / samples
# Derivatives w.r.t. y_pred:
#   mean squared error: y_pred - y_true
#   cross entropy:      -np.divide(y_true, y_pred)
###


# One-hot
def one_hot(class_num, label):
    """Encode a 1-D label vector as a (len(label), class_num) one-hot matrix.

    Labels are cast to int, matching the original per-element ``int()``
    conversion.  Improvement: a single vectorized fancy-index assignment
    replaces the Python-level row loop.
    """
    size = label.shape[0]
    result = np.zeros((size, class_num))
    result[np.arange(size), label.astype(int)] = 1
    return result


# Learning rate decay
def dynamic_lr(lr, epoch, decay=0.4):
    """Inverse-time learning-rate decay: lr / (1 + decay * epoch)."""
    scale = 1 + decay * epoch
    return lr / scale


# Data shuffling
def shuffle(data, label):
    """Shuffle ``data`` and ``label`` in place with the same permutation,
    so row i of data stays paired with element i of label.

    Fixes over the original:
    - the old loop swapped random rows with only the LAST row, which is
      a badly non-uniform shuffle; this uses one uniform permutation.
    - the old code raised ValueError for single-row input
      (``np.random.randint(0, 0)``); a length-1 (or empty) input is now
      a no-op, as expected.
    """
    perm = np.random.permutation(label.shape[0])
    # fancy indexing builds a reordered copy first, so the in-place
    # slice assignment is safe
    data[:] = data[perm]
    label[:] = label[perm]


# mean variance normalization
###
def get_mean_variance(X):
    """Return per-column mean and standard deviation of 2-D array ``X``,
    each as a list with one entry per column.

    NOTE: despite the name, ``variance`` holds np.std (standard
    deviation), kept as-is for compatibility with do_normalize.

    Fixes over the original:
    - removed the dead per-column normalization of a throwaway copy
      (the normalized ``tmp`` was never returned, and it divided by
      zero for constant columns);
    - axis reductions replace the transposed Python loop.
    """
    mean = list(np.mean(X, axis=0))
    variance = list(np.std(X, axis=0))
    return mean, variance


def do_normalize(X, mean, variance):
    """Normalize each column of ``X``: (column - mean[i]) / variance[i],
    returning a new array of the same shape (``X`` is not modified).

    ``mean``/``variance`` are the per-column stats from
    get_mean_variance (``variance`` is actually a std).

    Fixes over the original:
    - a single broadcast expression replaces the transposed per-column
      Python loop;
    - integer-dtype ``X`` no longer silently truncates results (the
      original wrote float results back into an int copy of X).

    NOTE(review): a zero entry in ``variance`` still divides by zero,
    matching the original behavior — confirm constant columns cannot
    occur upstream.
    """
    size = X.shape[1]
    assert size == len(mean) == len(variance)
    return (np.asarray(X, dtype=float) - np.asarray(mean)) / np.asarray(variance)
###
