SimpleNN / utils.py
ricardo-lsantos's picture
Fixed minor bugs. Adjusted the Ui parameters to be easier to train.
b7f5a9c
raw
history blame contribute delete
399 Bytes
import math
def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + e^-x), mapping any real x into (0, 1).

    Numerically stable for large |x|: the naive form math.exp(-x)
    raises OverflowError for large negative x (e.g. x = -1000), so the
    mathematically equivalent branch e^x / (1 + e^x) is used there.
    """
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    # x < 0: exp(-x) would overflow; exp(x) safely underflows toward 0.
    e = math.exp(x)
    return e / (1.0 + e)
def sigmoid_prime(x):
    """Derivative of the sigmoid, where x is the already-activated value.

    The caller passes s = sigmoid(z); the derivative is then s * (1 - s).
    """
    complement = 1 - x
    return x * complement
def relu(x):
    """Rectified linear unit: pass positive inputs through, clamp the rest to 0."""
    if x > 0:
        return x
    return 0
def relu_prime(x):
    """Subgradient of ReLU: 1 for positive inputs, 0 otherwise (including x == 0)."""
    if x > 0:
        return 1
    return 0
def tanh(x):
    """Hyperbolic tangent activation; thin wrapper over math.tanh."""
    result = math.tanh(x)
    return result
def tanh_prime(x):
    """Derivative of tanh, where x is the already-activated value.

    The caller passes t = tanh(z); the derivative is then 1 - t^2.
    """
    return 1 - x * x
def softmax(x):
    """Scalar logistic function e^x / (e^x + 1), kept under its original name.

    NOTE(review): despite the name, this is NOT a true softmax (which
    operates on a vector); algebraically it equals sigmoid(x). The name
    is preserved so existing callers keep working.

    Numerically stable: the naive form math.exp(x) raises OverflowError
    for large positive x (e.g. x = 1000), so the equivalent form
    1 / (1 + e^-x) is used on that side.
    """
    if x >= 0:
        # exp(x) could overflow; divide through by e^x instead.
        return 1.0 / (1.0 + math.exp(-x))
    e = math.exp(x)
    return e / (e + 1.0)
def softmax_prime(x):
    """Derivative of the logistic "softmax" above, where x is the activated value.

    Same form as sigmoid_prime: the caller passes s = softmax(z), and
    the derivative is s * (1 - s).
    """
    complement = 1 - x
    return x * complement