#!/usr/bin/env python3
#-*- coding:utf8 -*-
# Power by 2020-05-30 19:43:20

"""
File: network-test.py
Author: "caotian6666"
Email: "caotiandyx@163.com"
Github: "https://gitee.caotian6666.com"
Description: 探索模型设计和参数求解过程
"""

import sys
import os 
curpath=os.path.abspath(os.curdir)
sys.path.append(curpath)
import loaddata
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
'''
python import模块时， 是在sys.path里按顺序查找的。
sys.path是一个列表，里面以字符串的形式存储了许多路径。
使用A.py文件中的函数需要先将他的文件路径放到sys.path中
'''
import numpy as np
class NetWork(object):

    """Single-layer linear regression model trained by batch gradient descent.

    Computes z = x . w + b and minimizes the mean-squared error against the
    targets y. Weights are drawn from a fixed-seed normal distribution so a
    run is reproducible.
    """

    def __init__(self, num_of_weights):
        """Initialize the parameters w (vector) and b (scalar).

        :num_of_weights: number of input features; w has shape
            (num_of_weights, 1)
        """
        # NOTE: this seeds the *global* NumPy RNG, which makes construction
        # deterministic but also affects any other np.random user in the
        # process. Kept for backward compatibility with existing runs.
        np.random.seed(0)
        self.w = np.random.randn(num_of_weights, 1)
        self.b = 0.

    def forward(self, x):
        """Linear prediction z = x . w + b.

        :x: array of shape (n_samples, num_of_weights)
        :returns: predictions of shape (n_samples, 1)
        """
        return np.dot(x, self.w) + self.b

    def loss(self, z, y):
        """Mean-squared-error loss between predictions z and targets y.

        :z: predictions, shape (n_samples, 1)
        :y: targets, same shape as z
        :returns: scalar MSE
        """
        return np.mean((z - y) * (z - y))

    def gradient(self, x, y):
        """Gradients of the MSE loss with respect to w and b.

        :x: inputs, shape (n_samples, num_of_weights)
        :y: targets, shape (n_samples, 1)
        :returns: (gradient_w of shape (num_of_weights, 1),
                   gradient_b scalar)
        """
        # Evaluate the forward pass once; the original recomputed it
        # separately for the w- and b-gradients.
        err = self.forward(x) - y                      # (n_samples, 1)
        gradient_w = np.mean(err * x, axis=0)[:, np.newaxis]
        gradient_b = np.mean(err)
        return gradient_w, gradient_b

    def update(self, gradientW, gradientB, eta=0.01):
        """Take one gradient-descent step with learning rate eta.

        :gradientW: gradient for w, shape (num_of_weights, 1)
        :gradientB: gradient for b, scalar
        :eta: learning rate
        """
        self.w = self.w - gradientW * eta
        self.b = self.b - gradientB * eta

    def train(self, x, y, iterations=100, eta=0.01):
        """Run full-batch gradient descent for `iterations` steps.

        :x: inputs, shape (n_samples, num_of_weights)
        :y: targets, shape (n_samples, 1)
        :iterations: number of update steps
        :eta: learning rate
        :returns: list of the loss recorded before each update
        """
        losses = []
        for i in range(iterations):
            z = self.forward(x)
            step_loss = self.loss(z, y)
            gradient_w, gradient_b = self.gradient(x, y)
            self.update(gradient_w, gradient_b, eta)
            losses.append(step_loss)
            if i % 50 == 0:
                print('iter {}, loss {}'.format(i, step_loss))
        return losses
if __name__ == "__main__":
    # Load the dataset; the last column of each row is the target value.
    training_data, test_data = loaddata.load_data()
    features = training_data[:, :-1]
    labels = training_data[:, -1:]

    # 13 input features -> one weight per feature.
    net = NetWork(13)
    num_iterations = 1000
    loss_history = net.train(features, labels, iterations=num_iterations, eta=0.01)

    # Plot the training-loss curve over iterations.
    plt.plot(np.arange(num_iterations), np.array(loss_history))
    plt.show()

    #x1=x[0]
    #z=net.forward(x1)
    #w5=np.arange(-160.0,160.0,1.0)        
    #w9=np.arange(-160.0,160.0,1.0)        
    #losses=np.zeros([len(w5),len(w9)])
    #for w5index in range(len(w5)):
    #    for w9index in range(len(w9)):
    #       net.w[5]=w5[w5index] 
    #       net.w[9]=w9[w9index]
    #       z=net.forward(x)
    #       loss=net.loss(z,y)
    #       losses[w5index,w9index]=loss
    #fig=plt.figure()
    #ax=Axes3D(fig)
    #w5,w9=np.meshgrid(w5,w9)
    #ax.plot_surface(w5,w9,losses,rstride=1,cstride=1,cmap='rainbow')
    #plt.show()


