# -*- coding: utf-8 -*-
"""
Gradient debugging: verify an analytic gradient against a numerical
(finite-difference) approximation on a synthetic linear-regression task.
Created on Mon Feb 26 20:03:18 2018

@author: Allen
"""

import numpy as np

np.random.seed( 666 )  # fixed seed so the synthetic data set is reproducible
X = np.random.random( size = ( 1000, 10 ) )  # 1000 samples, 10 features in [0, 1)
X_b = np.hstack( ( np.ones( ( len( X ), 1 ) ), X ) )  # prepend an intercept column of ones

# The true theta (intercept + 10 coefficients) used to generate the targets.
true_theta = np.arange( 1, 12, dtype = float )

# Targets: the linear model plus standard-normal noise.
y = X_b.dot( true_theta ) + np.random.normal( size = len( X ) )

# Loss function
def J( theta, X_b, y ):
    """Mean squared error of the linear model ``X_b.dot(theta)`` against ``y``.

    Returns ``float("inf")`` when the evaluation fails numerically (e.g. a
    diverged theta), so gradient descent treats it as "worse than anything".
    """
    try:
        return np.sum( ( y - X_b.dot( theta ) ) ** 2 )/ len( X_b )
    # Narrowed from a bare `except:` — a bare except also swallows
    # KeyboardInterrupt/SystemExit, which must propagate.
    except Exception:
        return float( "inf" )

# Analytic gradient of the MSE loss
def dJ_math( theta, X_b, y ):
    """Closed-form gradient of J: (2/m) * X_b^T (X_b.theta - y)."""
    residual = X_b.dot( theta ) - y
    return X_b.T.dot( residual ) * 2.0 / len( y )

# Numerical gradient via central differences — used to sanity-check dJ_math.
def dJ_debug( theta, X_b, y, epsilon = 0.01 ):
    """Approximate the gradient of J at theta, one coordinate at a time.

    For each component i, J is evaluated at theta shifted by +/- epsilon in
    that single coordinate; the central-difference quotient
    (J(theta + e_i*eps) - J(theta - e_i*eps)) / (2*eps) approximates
    the partial derivative dJ/dtheta_i.
    """
    def partial( i ):
        # Perturb only coordinate i, in both directions.
        plus = theta.copy()
        plus[i] = plus[i] + epsilon
        minus = theta.copy()
        minus[i] = minus[i] - epsilon
        return ( J( plus, X_b, y ) - J( minus, X_b, y ) ) / ( 2 * epsilon )

    return np.array( [ partial( i ) for i in range( len( theta ) ) ] )

# Estimate theta by batch gradient descent.
def gradient_descent( dJ, X_b, y, initial_theta, eta, n_iters = 1e4, epsilon = 1e-8 ):
    """Minimize J over theta using the supplied gradient function `dJ`.

    Stops after `n_iters` steps, or earlier once the loss change between two
    consecutive theta values drops below `epsilon`. `eta` is the learning rate.
    """
    theta = initial_theta
    prev_cost = J( theta, X_b, y )
    step = 0

    while step < n_iters:
        theta = theta - eta * dJ( theta, X_b, y )
        cost = J( theta, X_b, y )
        # Converged: the step barely changed the loss.
        if abs( cost - prev_cost ) < epsilon:
            break
        prev_cost = cost
        step += 1

    return theta

# Rebuild the design matrix (same as above) and start from an all-zero theta.
X_b = np.hstack( ( np.ones( ( len( X ), 1 ) ), X ) )
initial_theta = np.zeros( ( X_b.shape[1] ) )
# Fit using the fast analytic gradient.
theta = gradient_descent( dJ_math, X_b, y, initial_theta, eta = 0.01, n_iters = 1e4, epsilon = 1e-8 )
print( theta )
'''
[  1.1251597    2.05312521   2.91522497   4.11895968   5.05002117
   5.90494046   6.97383745   8.00088367   8.86213468   9.98608331
  10.90529198]
'''
# Fit using the slow numerical gradient — the identical result below
# confirms that dJ_math is implemented correctly.
theta = gradient_descent( dJ_debug, X_b, y, initial_theta, eta = 0.01, n_iters = 1e4, epsilon = 1e-8 )
print( theta )
'''
[  1.1251597    2.05312521   2.91522497   4.11895968   5.05002117
   5.90494046   6.97383745   8.00088367   8.86213468   9.98608331
  10.90529198]
'''