import math

# Print tanh of three groups of pre-activation values,
# each group separated by a marker line.
for v in (1.59, 2.29, 1.62, 0.87):
    print(math.tanh(v))
print("************************")

for v in (3.9790, 4.2872, 3.1204, 2.4700):
    print(math.tanh(v))
print("************************")

for v in (3.8057, 4.3260, 2.5572, 2.2859):
    print(math.tanh(v))

import numpy as np


def softmax(x):
    """Row-wise softmax.

    Parameters
    ----------
    x : array-like, shape (n_rows, n_cols)
        2-D input; each row is mapped to a probability distribution.

    Returns
    -------
    numpy.ndarray
        Array of the same shape as ``x``; every row is non-negative
        and sums to 1.

    Side effects
    ------------
    Sets numpy's global print options to scientific notation with four
    decimals (kept from the original implementation — the surrounding
    script relies on it when printing arrays afterwards).
    """
    x = np.asarray(x, dtype=float)

    # Subtract the per-row max before exponentiating.  Mathematically a
    # no-op (the factor cancels in the ratio), but it prevents overflow
    # of np.exp for large inputs (e.g. values > ~709 for float64).
    shifted = x - np.max(x, axis=1, keepdims=True)

    # Compute the exponentials once (the original evaluated np.exp(x) twice).
    e = np.exp(shifted)
    probs = e / np.sum(e, axis=1, keepdims=True)

    # Scientific notation with four decimals for subsequent array printing.
    np.set_printoptions(formatter={'float': lambda v: "{:.4e}".format(v)})

    return probs


# Demo: run softmax on a small 3x3 matrix and show the
# intermediate per-row sums of exp(x).
x = [[70, 75.5, 102.5],
     [58, 79, 101.5],
     [50, 55.5, 71.5]]
print("原始 ：\n", x)

x_ = softmax(x)
print("变换后 ：\n", x_)
print("********************")
print(np.sum(np.exp(x), axis=1, keepdims=True))