# -*- coding: utf-8 -*-
'''
Created on Oct 27, 2010
Logistic Regression Working Module
@author: Peter & songting
'''
from numpy import *


def loadDataSet(filePath, num_feature):
    """
    Load a tab-separated data file into feature/label matrices.

    Each line holds `num_feature` feature columns followed by one label
    column.  A constant 1.0 bias/intercept term is prepended to every
    feature row.

    :param filePath: path to the tab-separated data file
    :param num_feature: number of feature columns per line
    :return: (X, y) where X is an m x (num_feature + 1) numpy matrix and
             y is an m x 1 column matrix of labels
    """
    X = []
    y = []
    # `with` guarantees the file handle is closed (the original leaked it)
    with open(filePath) as fr:
        for line in fr:
            currLine = line.strip().split('\t')
            lineArr = [1.0]  # bias/intercept term
            for i in range(num_feature):
                lineArr.append(float(currLine[i]))
            X.append(lineArr)
            y.append(float(currLine[num_feature]))
    return mat(X), mat(y).T


def sigmoid(z):
    """
    Logistic (sigmoid) function, h(z) = 1 / (1 + exp(-z)).

    Works element-wise on numpy arrays/matrices as well as scalars.

    :param z: scalar or numpy array/matrix
    :return: 1.0 / (1 + exp(-z)), element-wise
    """
    neg_exp = exp(-z)
    return 1.0 / (1.0 + neg_exp)


def batch_gradient_descent(X, y):
    """
    Batch gradient descent for logistic regression.

    Every update uses the full dataset, so each step is expensive on
    large data but follows the exact gradient direction.

    :param X: m x n feature matrix (bias column included)
    :param y: m x 1 label column matrix
    :return: n x 1 coefficient matrix theta
    """
    m, n = shape(X)
    alpha = 0.001        # fixed learning rate
    maxCycles = 500      # number of full-dataset passes
    theta = ones((n, 1)) # start with every coefficient at 1
    # Repeatedly step theta against the gradient of the cost.
    for _ in range(maxCycles):
        error = sigmoid(X * theta) - y       # prediction residual, m x 1
        theta = theta - alpha * X.T * error  # gradient step
    return theta


def plotBestFit(theta):
    """
    Scatter-plot the two classes of 'testSet.txt' and draw the decision
    boundary implied by theta (the line where theta^T * x = 0).

    :param theta: 3 x 1 regression coefficients [bias, x1, x2]
    """
    import matplotlib.pyplot as plt
    X, y = loadDataSet('testSet.txt', 2)
    dataArr = array(X)
    pos_x, pos_y = [], []
    neg_x, neg_y = [], []
    # Split samples by label; columns 1 and 2 are the two plotted features.
    for i in range(shape(dataArr)[0]):
        if int(y[i]) == 1:
            pos_x.append(dataArr[i, 1])
            pos_y.append(dataArr[i, 2])
        else:
            neg_x.append(dataArr[i, 1])
            neg_y.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(pos_x, pos_y, s=30, c='red', marker='s')
    ax.scatter(neg_x, neg_y, s=30, c='green')
    xs = arange(-3.0, 3.0, 0.1)
    # Boundary: theta0 + theta1*x1 + theta2*x2 = 0
    #   =>  x2 = (-theta0 - theta1*x1) / theta2
    ys = (-theta[0] - theta[1] * xs) / theta[2]
    ax.plot(xs, ys)
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()


def stochastic_gradient_descent0(X, y):
    """
    Plain stochastic gradient descent: one pass over the data, updating
    theta from a single sample at a time.

    :param X: m x n feature matrix (bias column included)
    :param y: m x 1 label column matrix
    :return: n x 1 coefficient matrix theta
    """
    m, n = shape(X)
    alpha = 0.01
    theta = ones((n, 1))  # initialize every coefficient to 1
    for idx in range(m):
        sample = X[idx]                       # 1 x n row
        h = sigmoid(sample * theta)           # scalar prediction
        residual = h - y[idx]
        theta = theta - alpha * sample.T * residual
    return theta


def stochastic_gradient_descent1(X, y, numIter=150):
    """
    Improved stochastic gradient descent. Improvements over the plain
    version:
    1. The step size alpha shrinks with each update (but never reaches 0
       because of the 0.01 constant), damping oscillation near the optimum.
    2. Samples are drawn randomly without replacement within each pass.

    :param X: m x n feature matrix (bias column included)
    :param y: m x 1 label column matrix
    :param numIter: number of full passes over the data
    :return: n x 1 coefficient matrix theta
    """
    m, n = shape(X)
    theta = ones((n, 1))  # initialize to all ones
    for j in range(numIter):
        # Pool of not-yet-used sample indices for this pass
        # (list(...) so `del` works on Python 3 as well).
        dataIndex = list(range(m))
        for i in range(m):
            # Step size decays with iteration but never hits 0.
            alpha = 4 / (1.0 + j + i) + 0.01
            # Pick a random position in the remaining pool...
            randPos = int(random.uniform(0, len(dataIndex)))
            # ...and map it to the actual sample row.  The original used
            # randPos to index X/y directly, which biased sampling toward
            # low row indices as the pool shrank.
            sampleIdx = dataIndex[randPos]
            # Logistic hypothesis for this sample (inline sigmoid).
            h = 1.0 / (1.0 + exp(-(X[sampleIdx] * theta)))
            theta = theta - alpha * X[sampleIdx].T * (h - y[sampleIdx])
            # Remove the sample so it is not drawn again this pass.
            del dataIndex[randPos]
    return theta


def mini_batch_gradient_descent(X, y, mini_batch_size, alpha, iter=200):
    """
    Mini-batch gradient descent: each update uses a consecutive slice of
    `mini_batch_size` samples, trading the cost of full-batch updates for
    the noise of single-sample ones.

    Fixes two bugs in the original: the slice was X[i:mini_batch_size]
    (empty for i >= mini_batch_size, so most updates were no-ops) instead
    of X[i:i+mini_batch_size], and `i += mini_batch_size` inside
    `for i in range(m)` had no effect on the loop variable.

    :param X: m x n feature matrix (bias column included)
    :param y: m x 1 label column matrix
    :param mini_batch_size: number of samples per update
    :param alpha: fixed learning rate
    :param iter: number of full passes over the data
                 (name kept for interface compatibility, despite
                 shadowing the builtin)
    :return: n x 1 coefficient matrix theta
    """
    m, n = shape(X)
    theta = ones((n, 1))
    for _ in range(iter):
        # Walk the data in consecutive mini-batches; the last batch may
        # be smaller than mini_batch_size.
        for start in range(0, m, mini_batch_size):
            batch_X = X[start:start + mini_batch_size]
            batch_y = y[start:start + mini_batch_size]
            # Logistic hypothesis for the whole batch (inline sigmoid).
            h = 1.0 / (1.0 + exp(-(batch_X * theta)))
            theta = theta - alpha * batch_X.T * (h - batch_y)
    return theta


def classifyVector(X, theta):
    """
    Logistic classification of a single feature vector.

    :param X: 1 x n feature row (bias term included)
    :param theta: n x 1 trained regression coefficients
    :return: 1.0 if the predicted probability exceeds 0.5, else 0.0
    """
    return 1.0 if sigmoid(X * theta) > 0.5 else 0.0


def colicTest():
    """
    Train a logistic model on 'horseColicTraining.txt' (21 features) and
    report the misclassification rate on 'horseColicTest.txt'.

    Fixes from the original: frTrain was opened, never used (loadDataSet
    reopens the training file itself) and never closed; frTest was also
    leaked.  `with` now closes the test file.

    :return: fraction of misclassified test samples
    """
    X, y = loadDataSet('horseColicTraining.txt', 21)
    theta = stochastic_gradient_descent1(X, y, 1000)
    errorCount = 0
    numTestVec = 0.0
    with open('horseColicTest.txt') as frTest:
        for line in frTest:
            numTestVec += 1.0
            currLine = line.strip().split('\t')
            lineArr = [1.0]  # bias term, matching loadDataSet
            for i in range(21):
                lineArr.append(float(currLine[i]))
            # Column 21 is the true label.
            if int(classifyVector(mat(lineArr), theta)) != int(currLine[21]):
                errorCount += 1
    errorRate = float(errorCount) / numTestVec
    print("the error rate of this test is: %f" % errorRate)
    return errorRate


def multiTest(numTests=10):
    """
    Run colicTest `numTests` times and print the mean error rate
    (each run resamples stochastically, so rates vary).

    :param numTests: number of independent train/test runs
    """
    totalError = 0.0
    for _ in range(numTests):
        totalError += colicTest()
    print("after %d iterations the average error rate is: %f" % (numTests, totalError / float(numTests)))
