# coding=utf-8

import numpy as np
import matplotlib.pyplot as plt
from random import *
def loadDataset(path='testSet.txt'):
    """Load a tab-separated dataset of 2-D points with binary labels.

    Each data line must contain: x<TAB>y<TAB>label.  A constant 1.0 is
    prepended to every feature row so that the first weight acts as the
    intercept term of the decision boundary.

    Args:
        path: path to the tab-separated data file.

    Returns:
        (matrixCof, classLabels): list of [1.0, x, y] rows and the
        matching list of integer class labels.
    """
    matrixCof = []
    classLabels = []
    # 'with' guarantees the handle is closed even if a line fails to parse
    # (the original left the file open on exception).
    with open(path) as fp:
        for line in fp:
            vals = line.strip().split('\t')
            matrixCof.append([1.0, float(vals[0]), float(vals[1])])
            classLabels.append(int(vals[2]))
    return matrixCof, classLabels


# Operates element-wise; when given a numpy matrix it returns a numpy matrix.
def sigmod(z):
    """Logistic sigmoid 1 / (1 + e^-z), applied element-wise."""
    exp_neg = np.exp(-z)
    return 1.0 / (1.0 + exp_neg)


# Plot the two labelled point sets plus the fitted decision line.
# errorID lists the indices (rows of matrixPoint) that were misclassified.
def drawScatter(matrixPoint, classLabels, weights, errorID=None):
    """Scatter-plot both classes, draw the decision boundary, and
    optionally highlight misclassified points in red.

    Args:
        matrixPoint: n x 2 numpy matrix of (x, y) coordinates.
        classLabels: sequence of 0/1 labels, one per row.
        weights: the three boundary coefficients (w0 + w1*x + w2*y = 0);
            accepts a list, 1-D array, or 1 x 3 numpy matrix.
        errorID: optional list of misclassified row indices.
    """
    # Fix: mutable default argument replaced with None sentinel.
    if errorID is None:
        errorID = []
    # Flatten so indexing works for lists, 1-D arrays, and 1x3 np.matrix
    # alike (a 1x3 matrix raised IndexError on weights[1] before).
    w = np.asarray(weights).ravel()

    x0 = []; y0 = []; x1 = []; y1 = []
    for i in range(len(classLabels)):
        if classLabels[i] == 0:
            x0.append(matrixPoint[i, 0])
            y0.append(matrixPoint[i, 1])
        else:
            x1.append(matrixPoint[i, 0])
            y1.append(matrixPoint[i, 1])

    fig = plt.figure()
    ax = fig.add_subplot(111)
    plt.title("classify scatters")
    plt.xlabel("feature-x")
    plt.ylabel("feature-y")
    type0 = ax.scatter(x0, y0, c='green', marker='^', s=50)
    type1 = ax.scatter(x1, y1, c='blue', marker='o', s=50)

    # Decision boundary: w0 + w1*x + w2*y = 0  =>  y = -(w0 + w1*x) / w2
    x = np.arange(-3.0, 3.0, 0.1)
    y = (-w[0] - w[1] * x) / w[2]
    ax.plot(x, y)

    handles = [type0, type1]
    labels = ["class0", "class1"]
    if len(errorID) != 0:
        errorx = [matrixPoint[index, 0] for index in errorID]
        errory = [matrixPoint[index, 1] for index in errorID]
        type2 = plt.scatter(errorx, errory, c='red', marker='x', s=80)
        handles.append(type2)
        labels.append("error")

    # Fix: the original referenced type2 unconditionally, raising
    # NameError whenever errorID was empty.
    # loc=1 puts the legend in the upper-right corner (also the default).
    ax.legend(handles, labels, loc=1)
    plt.show()
    plt.close()

# The most basic, brute-force logistic regression.
def logistic(matrixPoint, classLabels):
    """Fit logistic-regression weights by full-batch gradient ascent.

    Runs 600 fixed-step iterations over the whole dataset, plots how each
    of the three weights evolves across iterations (to eyeball convergence
    stability), and returns the weights as a 1 x m numpy matrix.
    """
    data = np.matrix(matrixPoint)
    n, m = np.shape(data)
    weights = np.ones((m, 1))
    labels = np.matrix(classLabels).transpose()
    alpha = 0.001

    # Per-iteration traces of the three weights, for the convergence plot.
    steps = []
    traces = ([], [], [])
    for it in range(600):
        predicted = sigmod(data * weights)
        # Full-batch update along the gradient of the log-likelihood.
        weights += alpha * data.transpose() * (labels - predicted)

        steps.append(it + 1)
        for k in range(3):
            traces[k].append(weights[k, 0])

    fig = plt.figure()
    ax = fig.add_subplot(111)
    for trace in traces:
        ax.plot(steps, trace)

    # ax.legend(..., ['weight0', 'weight1', 'weight2'], loc=2)
    plt.show()
    return weights.transpose()


# Improved (stochastic) logistic regression; a cheaper drop-in for logistic().
def stoGradAscent(matrixPoint, classLabels):
    """Fit logistic-regression weights by stochastic gradient ascent.

    Performs 400 passes over the data; within each pass every sample is
    visited exactly once in random order (sampling without replacement)
    with a learning rate that decays as training progresses.

    Returns:
        The learned weights as a 1 x m numpy matrix.
    """
    matrixPoint = np.matrix(matrixPoint)
    n, m = np.shape(matrixPoint)
    weights = np.ones((m, 1))
    classLabels = np.matrix(classLabels).transpose()
    alpha = 0.2

    for j in range(400):
        # Pool of row indices not yet visited in this pass.
        choices = list(range(n))
        for i in range(n):
            # Pick a random *position* in the pool, then map it to the
            # actual sample row.  (Bug fix: the original indexed the data
            # with the pool position itself, which biased training toward
            # early rows and broke the without-replacement scheme.)
            pick = int(random() * len(choices))
            sampleId = choices[pick]
            # res is a 1x1 numpy matrix.
            res = sigmod(matrixPoint[sampleId] * weights)
            error = classLabels[sampleId, :] - res
            # Decaying step size keeps late updates small and stable.
            weights += alpha * (1.0 / (i + j + 1)) * matrixPoint[sampleId, :].transpose() * error
            # Drop the chosen sample so it is not revisited this pass.
            del choices[pick]
    return weights.transpose()


def classifyDatasets(matrixPoint, weights):
    """Label every row of matrixPoint as 0 or 1 using trained weights.

    A row is classified as 1 when sigmod(x . w) >= 0.5, i.e. when its
    linear score is non-negative.

    Returns:
        A list of 0/1 predictions, one per row.
    """
    points = np.matrix(matrixPoint)
    w = np.matrix(weights)
    rowCount = np.shape(points)[0]
    classifyRes = []
    for row in range(rowCount):
        score = points[row] * w.transpose()
        classifyRes.append(1 if sigmod(score)[0, 0] >= 0.5 else 0)
    return classifyRes


# The two answer sequences are assumed to contain the same number of
# elements; returns the error rate and the indices that disagree.
def resultCompare(standardRes, inputRes):
    """Compare predicted labels against the ground truth.

    Args:
        standardRes: ground-truth labels.
        inputRes: predicted labels, same length as standardRes.

    Returns:
        (errorRate, errorRes): fraction of mismatches and the list of
        mismatching indices.  Empty input yields (0.0, []) instead of
        raising ZeroDivisionError.
    """
    errorRes = [i for i, (expected, got) in enumerate(zip(standardRes, inputRes))
                if expected != got]
    if not inputRes:
        return 0.0, errorRes
    return len(errorRes) * 1.0 / len(inputRes), errorRes


# Main driver function.
def generateAndCheckModel(path='testSet.txt'):
    """Load the dataset, train a model, print the resubstitution error,
    and plot the points together with the fitted decision line.
    """
    matrixCof, classLabels = loadDataset(path)

    matrixPoint = np.matrix(matrixCof)

    # Note: weights comes back as a 1 x 3 matrix.
    # logistic() and stoGradAscent() are interchangeable; the brute-force
    # batch version tends to be slightly more accurate on this toy set,
    # but its cost makes it impractical on real data, hence the stochastic
    # gradient-ascent variant is used here.
    # weights = logistic(matrixPoint, np.matrix(classLabels))
    weights = stoGradAscent(matrixPoint, np.matrix(classLabels))

    classifyRes = classifyDatasets(matrixPoint, weights)
    # print() calls (not py2 print statements) keep this file runnable on
    # Python 3; output is unchanged.
    print(classifyRes)
    errorRate, errorLabels = resultCompare(classLabels, classifyRes)
    print("这个模型的错误率是: ", errorRate)
    print("这个模型的错误集合是: ", errorLabels)
    drawScatter(matrixPoint[:, 1:], classLabels, weights[0], errorLabels)

if __name__ == '__main__':
    generateAndCheckModel()