#手写逻辑回归
import numpy as np
import matplotlib as plot

# Define the training data: 7 samples with 2 features each, and 0/1 labels.
x_train = np.array([[30, 30],
                    [50, 50],
                    [60, 60],
                    [80, 80],
                    [90, 90],
                    [40, 40],
                    [70, 70]])
y_train = np.array([1, 1, 0, 0, 0, 1, 0])

class Logistic_Regression:
    """Binary logistic regression trained by batch gradient descent."""

    def __init__(self, x_train, y_train, size, a, m):
        '''
        :param x_train: training features, shape (n_samples, 2)
        :param y_train: training labels (0/1), shape (n_samples,)
        :param size: number of gradient-descent iterations
        :param a: learning rate
        :param m: batch size used to scale (average) the gradient
        '''
        self.x_train = x_train
        self.y_train = y_train
        self.size = size
        self.a = a
        self.m = m
        # Randomly initialise the (2, 1) weight column vector.
        self.w = np.random.normal(size=(2, 1))

    def sigmod(self, X):
        """Sigmoid activation (name kept as-is for backward compatibility)."""
        return 1 / (1 + np.exp(-X))

    def train(self):
        """Run `size` iterations of gradient descent on the stored data.

        Fix: the original read the module-level globals ``x_train`` /
        ``y_train`` here; it now uses the instance's own training data.
        Also replaces the deprecated ``np.mat`` with plain ndarrays.
        """
        for _ in range(self.size):
            d_w = np.zeros((2, 1))  # accumulated gradient over the batch
            for j in range(len(self.x_train)):
                # Feed each sample as a (2, 1) column vector.
                x = np.asarray(self.x_train[j], dtype=float).reshape(-1, 1)
                # Per-sample gradient of the logistic loss: (sigmoid(w.x) - y) * x
                err = self.sigmod(float(self.w.T @ x)) - self.y_train[j]
                d_w += err * x
            # Averaged gradient-descent step.
            self.w -= self.a * d_w / self.m

    def predict(self, x):
        '''
        Classify one sample.

        :param x: feature vector with 2 entries
        :return: 1 if sigmoid(w.x) >= 0.5, else 0
        '''
        x = np.asarray(x, dtype=float).reshape(-1, 1)
        s = self.sigmod(float(self.w.T @ x))
        return 1 if s >= 0.5 else 0

if __name__ == "__main__":
    # Train on the full data set (batch size = number of samples).
    model = Logistic_Regression(x_train, y_train, 500, 0.003, len(y_train))
    model.train()
    print(model.predict(np.array([40, 40])))
