from neuronnetwork import network
from scipy.special import expit
import numpy as np

# Sigmoid activation function.
def logi_func(x):
    """Logistic sigmoid 1 / (1 + e^-x), via scipy's numerically stable expit."""
    activated = expit(x)
    return activated

# Derivative of the sigmoid activation function.
def logi_func_grad(x):
    """Gradient of the logistic sigmoid: s(x) * (1 - s(x))."""
    s = logi_func(x)
    return s * (1 - s)
# Loss function: half squared error.
def mseloss(x, y):
    """Return (y - x)^2 / 2, the per-sample squared-error loss."""
    diff = y - x
    return 0.5 * diff ** 2

# Derivative of the loss function with respect to the prediction x.
def mseloss_derivative(x, y):
    """Gradient of mseloss w.r.t. x: d/dx [(y - x)^2 / 2] = -(y - x)."""
    diff = y - x
    return -diff

# x: training inputs — one feature per sample, values 29 down to 0.
x = np.arange(29, -1, -1).reshape(30, 1)
# y: training targets — y = 2 * x for each sample.
y = 2 * x
traindata = [x, y]
# Network layout: 1 input -> 4 hidden -> 1 output, sigmoid activation on every layer.
n = network(traindata, mseloss, mseloss_derivative,
            (1, 4, logi_func, logi_func_grad),
            (4, 1, logi_func, logi_func_grad))

# Train for 10 epochs, reshuffling the sample order after each pass.
# NOTE(review): the original comment claimed 100 iterations but the loop runs 10.
for epoch in range(10):
    # Learning rate is 0.001.
    loss = n.optim(0.001)

    print(loss)
    n.shuffleinputdata()