'''
Created on 2014/03/18

@author: phuvieng
'''
class Hypothesis(object):
    """Linear hypothesis h(x) = theta_0 + theta_1 * x for a single feature."""
    def __init__(self):
        # Feature value of the most recently evaluated sample.
        self.h_X = None
    def getHts(self, Parameters, Trainning):
        """Evaluate the hypothesis.

        Parameters -- [theta_0, theta_1]
        Trainning  -- one sample [x, y]; only x (index 0) is used here.
        Returns theta_0 + theta_1 * x.
        """
        x = Trainning[0]
        theta_0 = Parameters[0]
        theta_I = Parameters[1]
        # Remember the inputs on the instance, as the original contract does.
        self.h_X = x
        self.h_theta_0 = theta_0
        self.h_theta_I = theta_I
        return theta_0 + theta_I * x
class CostFunction(object):
    """Mean-squared-error cost J(theta) = (1/2m) * sum((h(x_i) - y_i)^2)."""
    def __init__(self):
        self.c_par = None     # last parameter vector evaluated
        self.c_tra = None     # last training set evaluated
        self.c_result = None  # last cost value computed
    def getCst(self, Parameters, Trainning):
        """Return the cost of Parameters over the Trainning samples.

        Parameters -- [theta_0, theta_1]
        Trainning  -- list of [x, y] samples
        """
        self.c_par = Parameters
        self.c_tra = Trainning
        model = Hypothesis()
        total = sum((model.getHts(Parameters, sample) - sample[1]) ** 2
                    for sample in Trainning)
        self.c_result = total / (2 * len(Trainning))
        return self.c_result
class GradientDescent(object):
    """Batch gradient descent for single-feature linear regression.

    Performs simultaneous updates of theta_0 and theta_1 and stops as soon
    as an update fails to strictly reduce the cost.
    """
    def __init__(self, alpha, Parameters, Trainning):
        # alpha      -- learning rate
        # Parameters -- initial [theta_0, theta_1]
        # Trainning  -- list of [x, y] samples
        self.g_par = Parameters
        self.g_tra = Trainning
        self.g_alp = alpha

    def getMin(self):
        """Iterate until the cost stops improving, updating self.g_par in place."""
        if not self.g_tra:
            # Empty training set: nothing to fit, and m = 0 would divide by zero.
            return
        Hx = Hypothesis()
        Jx = CostFunction()
        numIterations = 0
        m = len(self.g_tra)
        # Cost of the current parameters; carried across iterations so the
        # cost function is evaluated once per step instead of twice.
        JxOld = Jx.getCst(self.g_par, self.g_tra)
        while True:
            # Accumulate the (unscaled) partial derivatives of the cost.
            drv0 = 0.0
            drv1 = 0.0
            for sample in self.g_tra:
                demp = Hx.getHts(self.g_par, sample) - sample[1]
                drv0 += demp
                drv1 += demp * sample[0]
            # Simultaneous update: both temporaries use the old parameters.
            temp0 = self.g_par[0] - self.g_alp * drv0 / m
            temp1 = self.g_par[1] - self.g_alp * drv1 / m

            JxNew = Jx.getCst([temp0, temp1], self.g_tra)
            print('Cost', JxOld, '\tTheta', self.g_par[0], ' & ', self.g_par[1])
            if JxNew < JxOld:
                self.g_par = [temp0, temp1]
                JxOld = JxNew
                numIterations += 1
            else:
                print(numIterations)
                break

    
#------------------------------------------------------------------------------

# Demo data set: y = x exactly, so the optimum is theta = [0, 1].
# Trainning = [[2104, 460], [1416, 232], [4867, 315]]
Trainning = [[0.0, 0.0], [1.0, 1.0], [2.0, 2.0], [3.0, 3.0]]
Parameters = [1.0, 1.0]
Alpha = 0.01

# Guard the run so importing this module does not trigger the descent.
if __name__ == "__main__":
    Mn = GradientDescent(Alpha, Parameters, Trainning)
    Mn.getMin()