#https://zhuanlan.zhihu.com/p/136422177
# -*- coding: utf-8 -*-

import numpy as np
import pandas as pd
from sklearn import datasets
from sklearn.model_selection import train_test_split
import math
import matplotlib.pyplot as plt

def sigmoid(X, W, b):
    """Logistic link function: 1 / (1 + exp(-(X @ W^T + b))).

    X is an (n_samples, n_features) matrix, W a weight vector and b a
    scalar bias; returns the per-sample probability of the positive class.
    """
    z = np.dot(X, W.T) + b
    return 1.0 / (1.0 + np.exp(-z))

class Exp_Decayer():
    """Exponential learning-rate decay schedule.

    When enabled (``init=True``), the learning rate at step k is

        lr(k) = base_lr * decay_rate ** (k / decay_steps)

    matching the standard (TensorFlow-style) exponential decay.  When
    disabled, ``rate()`` always returns the constant base learning rate.

    BUG FIX: the original multiplied the *already decayed* ``self.lr`` by
    ``decay_rate ** (step / decay_steps)`` on every call, compounding to
    ``decay_rate ** (k * (k + 1) / (2 * decay_steps))`` after k calls —
    a much faster, quadratic-exponent decay than intended.  The decayed
    rate is now always computed from the initial learning rate.
    """

    def __init__(self, init=False, lr=0.01, decay_rate=0.9, decay_steps=1):
        self.base_lr = lr          # initial learning rate; decay is applied to this
        self.lr = lr               # current (possibly decayed) learning rate
        self._init = init          # True => decay enabled
        self.decay_rate = decay_rate
        self.decay_steps = decay_steps
        self.global_step = 0       # number of rate() calls so far

    def rate(self):
        """Advance one global step and return the current learning rate."""
        self.global_step += 1
        if self._init:
            # Non-compounding: always decay from the initial learning rate.
            self.lr = self.base_lr * math.pow(
                self.decay_rate, self.global_step / self.decay_steps
            )
        return self.lr

class LogisticRegression():
    """Binary logistic regression trained by full-batch gradient descent.

    The learning rate is either constant or exponentially decayed via
    Exp_Decayer, depending on the ``decay`` flag.  ``fit`` plots the
    per-epoch learning-rate curve with matplotlib when training finishes.
    """

    def __init__(self, epochs=5, lr=0.01, decay=False, decay_rate=0.9, decay_steps=1):
        self.W = None            # weight vector, shape (n_features,), set by fit()
        self.b = None            # scalar bias, set by fit()
        self.loss = None
        self.epochs = epochs
        self._lr_decay = decay
        self.decayer = Exp_Decayer(decay, lr, decay_rate, decay_steps)

    def predict(self, X):
        """Return hard 0/1 labels: 1 where the sigmoid probability exceeds 0.5."""
        P = sigmoid(X, self.W, self.b)
        return np.where(P > 0.5, 1, 0)

    def fit(self, X, Y):
        """Train on feature matrix X (n_samples, n_features) and 0/1 labels Y.

        Prints a progress line every 10 epochs and the training accuracy
        each epoch, then plots the learning-rate schedule.  Returns self.
        """
        np.random.seed(10)  # fixed seed so weight init is reproducible
        self.W = np.random.normal(size=X.shape[1])
        self.b = 0
        LR = np.zeros(self.epochs)  # learning rate recorded per epoch, for plotting
        for epoch in range(self.epochs):
            if epoch % 10 == 0:
                print("epoch:%d/%d" % (epoch, self.epochs))
            # Gradient of the negative log-likelihood, averaged over samples.
            # Computed once per epoch (the original evaluated sigmoid twice).
            residual = sigmoid(X, self.W, self.b) - Y
            delta_w = X.T.dot(residual) / X.shape[0]
            delta_b = residual.mean()
            lr = self.decayer.rate()
            LR[epoch] = lr
            self.W -= lr * delta_w
            self.b -= lr * delta_b
            print("accuracy:", accuracy(self.predict(X), Y))

        plt.title("LR")
        plt.plot(range(self.epochs), LR, 'g')
        plt.show()
        return self


def accuracy(pred, true):
    """Return the fraction of entries where ``pred`` agrees with ``true``."""
    n_correct = np.count_nonzero(pred == true)
    return n_correct / len(pred)

def normalize(x, axis=None):
    """Min-max scale ``x`` into [0, 1].

    With the default ``axis=None`` the global minimum/maximum of the whole
    array are used, exactly as before.  Pass ``axis=0`` to scale each
    feature column independently — usually the right choice for a feature
    matrix whose columns have very different ranges.
    """
    lo = np.min(x, axis=axis, keepdims=True)
    hi = np.max(x, axis=axis, keepdims=True)
    return (x - lo) / (hi - lo)

if __name__ == "__main__":
    # Load the breast-cancer binary classification dataset.
    data = datasets.load_breast_cancer()
    X = data.data
    y = data.target
    # Min-max normalization of the features.
    X_norm = normalize(X)
    # Sequential 80/20 train/test split.
    split = int(len(X_norm) * 0.8)
    X_train, X_test = X_norm[:split], X_norm[split:]
    y_train, y_test = y[:split], y[split:]
    # Train with an exponentially decayed learning rate.
    lr = LogisticRegression(epochs=200, lr=0.9, decay=True, decay_rate=0.95, decay_steps=500)
    lr.fit(X_train, y_train)
    y_pred = lr.predict(X_test)
    # Evaluate accuracy on the held-out split.
    acc = accuracy(y_pred, y_test)
    print("acc", acc)
    # Predicted labels (shifted down slightly) vs. true labels on feature 0.
    plt.scatter(X_test[:, 0], y_pred - 0.1, color="green")
    plt.scatter(X_test[:, 0], y_test, color="red")
    plt.show()


