#-*- coding: utf-8 -*-
'''
Created on Feb 16, 2016.

Linear-regression training by batch gradient descent.

Input:      regressiondata.text -- whitespace-separated rows of
            feature values followed by a 0/1 class label
Output:     the fitted weight vector theta and the per-iteration cost

@author: XFBY
'''
import numpy as np
import matplotlib.pyplot as plo
import os
import time
#alpha = 1
def splidata(data):
    """Load a whitespace-separated data file into feature/label matrices.

    Each non-empty line is split on whitespace; all fields but the last
    are features, the last field is the label.

    Args:
        data: path to the data file.

    Returns:
        (np_dataset, np_labels): an (n, m) ``np.matrix`` of float
        features and an (n, 1) ``np.matrix`` of float labels.
    """
    dataset = []
    labels = []
    # 'with' guarantees the file is closed even if parsing raises.
    with open(data, 'r') as datas:
        for line in datas:
            fields = line.split()
            if not fields:  # skip blank lines
                continue
            # Convert to float here: the original kept strings, which
            # made matrix arithmetic downstream impossible.
            dataset.append([float(v) for v in fields[:-1]])
            labels.append([float(fields[-1])])
    np_dataset = np.mat(dataset)
    np_labels = np.mat(labels)
    return np_dataset, np_labels
def TrainModel(maxnum,alpha,dataset,labeldata):
    k = 0
    cost = []
    theta = np.ones(2)
    theta.shape=(2,1)
    theta.transpose
    print(theta.shape)
    print(dataset.shape)
    while(k<maxnum):
        hx = dataset*theta
        loss = hx - labeldata
        erro = np.sum(loss**2)
        cost.append(erro)
        gradient = np.dot(dataset,loss)
        theta = theta - gradient*alpha
        pass
        k+= 1
    return theta,cost
if __name__ == "__main__":
    # Guard so importing this module does not trigger file I/O and training.
    da, la = splidata("regressiondata.text")
    theta1, erro1 = TrainModel(500, 0.0001, da, la)
    print(theta1, erro1)
        
