#!/usr/bin/python
# author dennis
# 2022年07月18日
import random

import numpy as np
from numpy import *

filename = '../Perceptron/test.txt'  # path to the training data file


def loadDataSet(path=None):  # read the dataset (two features per sample)
    """Load a whitespace-separated dataset of two features plus a label.

    Each non-empty line has the form "x1 x2 label". Every feature row is
    augmented with a leading 1.0 (bias term).

    Args:
        path: file to read; defaults to the module-level `filename`.

    Returns:
        (dataMat, labelMat): list of [1.0, x1, x2] rows and list of int labels.
    """
    dataMat = []
    labelMat = []
    if path is None:
        path = filename
    # `with` guarantees the file handle is closed (the original leaked it).
    with open(path) as fr:
        for line in fr:
            lineArr = line.strip().split()
            if not lineArr:  # tolerate blank lines
                continue
            dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])  # augmented feature vector
            labelMat.append(int(lineArr[2]))
    return dataMat, labelMat


def sigmoid1(x):
    """Numerically stable element-wise sigmoid for NumPy arrays/matrices.

    exp() is only ever evaluated on non-positive arguments, so large |x|
    cannot overflow.
    """
    indices_pos = np.nonzero(x >= 0)
    indices_neg = np.nonzero(x < 0)

    # dtype=float is the fix: with integer input, zeros_like(x) would be an
    # int array and every assignment below would silently truncate to 0.
    y = np.zeros_like(x, dtype=float)
    y[indices_pos] = 1 / (1 + np.exp(-x[indices_pos]))
    y[indices_neg] = np.exp(x[indices_neg]) / (1 + np.exp(x[indices_neg]))

    return y

def sigmoid(x):
    """Numerically stable scalar sigmoid.

    Chooses the branch whose exponent is non-positive, so exp() never
    overflows regardless of the sign or magnitude of x.
    """
    if x < 0:
        ex = exp(x)
        return ex / (1 + ex)
    return 1 / (1 + exp(-x))


def gradAscent(dataMat, labelMat):  # batch gradient ascent for the optimal weights
    """Full-batch gradient ascent on the logistic log-likelihood.

    Args:
        dataMat: list of augmented feature rows [1.0, x1, x2].
        labelMat: list of 0/1 class labels.

    Returns:
        (n, 1) weight matrix after `maxCycles` full-batch updates.
    """
    dataMatrix = mat(dataMat)                # (m, n) design matrix
    classLabels = mat(labelMat).transpose()  # (m, 1) label column
    m, n = shape(dataMatrix)
    alpha = 0.001    # learning rate
    maxCycles = 500  # number of full-batch iterations
    weights = np.ones((n, 1))
    for k in range(maxCycles):
        z = np.asarray(dataMatrix * weights)
        # Stable element-wise sigmoid computed inline: the scalar `sigmoid`
        # raised "truth value of an array is ambiguous" here because z has
        # m > 1 elements. (Leftover debug print(h) removed as well.)
        e = np.exp(-np.abs(z))
        h = np.where(z >= 0, 1.0 / (1.0 + e), e / (1.0 + e))
        error = classLabels - h              # gradient direction (y - h)
        weights = weights + alpha * dataMatrix.transpose() * error  # update step
    return weights


def stocGradAscent0(dataMat, labelMat):
    """Single-pass stochastic gradient ascent.

    When the dataset is large, a full-batch update per iteration is
    expensive; instead the weights are updated once per sample, in order.
    """
    samples = mat(dataMat)
    rows, cols = shape(samples)
    step = 0.01
    weights = ones((cols, 1))
    for row in range(rows):  # one update per data row
        z = sum(samples[row] * weights)
        # stable scalar sigmoid (same maths as the module-level sigmoid())
        h = 1 / (1 + exp(-z)) if z >= 0 else exp(z) / (1 + exp(z))
        residual = labelMat[row] - h
        weights = weights + step * residual * samples[row].transpose()
    return weights


def stocGradAscent1(dataMat, labelMat):
    """Stochastic gradient ascent with a decaying step size and
    per-epoch sampling without replacement.

    Fixes two bugs in the original:
      * `del (list(dataIndex)[randIndex])` deleted from a throwaway copy,
        so samples were effectively drawn WITH replacement;
      * `randIndex` indexed the data directly instead of going through
        `dataIndex`, so the "remaining samples" bookkeeping did nothing.

    Returns:
        (n, 1) weight matrix.
    """
    dataMatrix = mat(dataMat)
    m, n = shape(dataMatrix)
    weights = ones((n, 1))
    maxCycles = 500
    for j in range(maxCycles):  # epochs
        dataIndex = [i for i in range(m)]  # samples not yet used this epoch
        for i in range(m):
            # Step size shrinks as training progresses; the +0.0001 floor
            # keeps the weights moving even after many iterations.
            alpha = 4 / (1 + j + i) + 0.0001
            randIndex = int(random.uniform(0, len(dataIndex)))  # random draw
            sample = dataIndex[randIndex]
            z = sum(dataMatrix[sample] * weights)
            # stable scalar sigmoid
            h = 1 / (1 + exp(-z)) if z >= 0 else exp(z) / (1 + exp(z))
            error = labelMat[sample] - h
            weights = weights + alpha * error * dataMatrix[sample].transpose()
            del dataIndex[randIndex]  # remove the drawn sample (no replacement)
    return weights


def mini_batchGradDescent(dataMat, labelMat):  # mini-batch trainer
    """Mini-batch gradient ascent on the logistic log-likelihood.

    (Historical name kept for callers; the `+` update direction is ascent,
    consistent with the other trainers in this module.)

    Fixes vs. the original:
      * `math.floor` raised NameError — `math` was never imported; integer
        division does the job;
      * the `m % mini_batch_size == 0` test was loop-invariant, so whenever
        m was not divisible by the batch size EVERY update used only the
        tail batch and the full batches were never touched. Each epoch now
        walks all full batches plus the remainder batch once.

    Returns:
        (n, 1) weight matrix.
    """
    dataMatrix = mat(dataMat)
    classLabels = mat(labelMat).transpose()
    m, n = shape(dataMatrix)
    weights = ones((n, 1))
    mini_batch_size = 10
    # batches per epoch, counting a short tail batch when m isn't divisible
    num_batches = m // mini_batch_size + (1 if m % mini_batch_size else 0)
    for j in range(500):  # epochs
        for i in range(num_batches):
            alpha = 4 / (1 + j + i) + 0.0001  # decaying step size
            start = i * mini_batch_size
            mini_batch = dataMatrix[start:start + mini_batch_size]
            z = np.asarray(mini_batch * weights)
            # stable element-wise sigmoid (same maths as sigmoid1)
            e = np.exp(-np.abs(z))
            h = np.where(z >= 0, 1.0 / (1.0 + e), e / (1.0 + e))
            error = classLabels[start:start + mini_batch_size] - h
            weights = weights + alpha * mini_batch.transpose() * error  # update step
    return weights


# 显示数据
def plotBestFit(weights):  # plot the data and the learned decision boundary
    """Scatter both classes and draw the decision boundary line.

    Args:
        weights: array-like of three elements [w0, w1, w2] (augmented
            weights), e.g. the `.getA()` of a trainer's result.
    """
    import matplotlib.pyplot as plt
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    n = shape(dataArr)[0]
    # positive samples (label 1)
    xcord1 = []
    ycord1 = []
    # negative samples (label 0)
    xcord2 = []
    ycord2 = []
    # split the points by class label
    for i in range(n):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i, 1])
            ycord1.append(dataArr[i, 2])
        else:
            xcord2.append(dataArr[i, 1])
            ycord2.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = arange(-3.0, 3.0, 0.1)
    # boundary: w0 + w1*x + w2*y = 0  =>  y = -(w0 + w1*x) / w2
    # (leftover debug print(x)/print(y) removed)
    y = (-weights[0] - weights[1] * x) / weights[2]
    ax.plot(x, y)
    plt.xlabel('X1')
    plt.ylabel('X2')
    plt.show()


def main():
    """Train with mini-batch gradient descent and plot the fitted boundary.

    The other trainers (gradAscent, stocGradAscent0, stocGradAscent1) share
    the same interface and can be swapped in here.
    """
    features, labels = loadDataSet()
    trained = mini_batchGradDescent(features, labels).getA()
    print(trained)
    plotBestFit(trained)


# Script entry point: train and visualize only when run directly.
if __name__ == '__main__':
    main()
