import numpy
from sklearn.datasets import make_circles
import pandas
# Generate a 2-D "concentric circles" dataset: 500 points in two classes
# (a tight inner circle, factor=0.1, vs. an outer ring) with light noise.
data, y = make_circles(n_samples=500, noise=0.05, random_state=42, factor=0.1)
import matplotlib.pyplot as plt

# Visualize the raw data, colored by class label.
plt.scatter(data[:, 0], data[:, 1], c=y)
plt.title("Circle-shaped Dataset")  # fixed: data is from make_circles, not make_moons
plt.xlabel("Feature 1")
plt.ylabel("Feature 2")
plt.show()
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-x)).

    The argument is clipped to [-500, 500] before exponentiation so that
    numpy.exp never overflows (float64 exp overflows near 710, emitting a
    RuntimeWarning); the returned values are unchanged to double precision,
    since sigmoid saturates to 0/1 far before |x| = 500.
    """
    return 1/(1+numpy.exp(-numpy.clip(x, -500, 500)))
# Append the 0/1 label as a last column, giving data shape (m, 3):
# [feature 1, feature 2, label].
data=numpy.hstack((data,y.reshape(-1,1)))
#data=numpy.loadtxt("./testSet.txt",delimiter='\t')
# Data preprocessing (legacy alternative datasets, kept commented out)
# data=pandas.read_table("./datingTestSet.txt",sep="\t",header=None,na_filter=False)
# dic={'largeDoses':1, 'smallDoses':2, 'didntLike':0}
# data.iloc[:,-1]=data.iloc[:,-1].map(dic)
# data=data.values[:,[0,1,3]]
# data=data[data[:,-1]!=2]
# feature=data[:,0:-1]
# # min-max normalize the features to [0, 1]
# feature=(feature-feature.min(axis=0))/(feature.max(axis=0)-feature.min(axis=0))
# data=numpy.hstack((feature,data[:,-1].reshape((-1,1))))
# data=data.astype(float)
# feature=data[:,0:-1]
# label=data[:,-1]
n=len(data)  # sample count (later rebound by `m,n = data[:,0:-1].shape`)
def regression(w, b, epoch, eta, p, dataset=None):
    """Train one logistic-regression weak learner by full-batch gradient descent.

    Minimizes the weighted squared error
        sum_i p_i * (sigmoid(x_i . w + b) - y_i)**2
    (squared loss, not cross-entropy — hence the extra s*(1-s) factor in the
    gradient below).

    Parameters
    ----------
    w : initial weight vector, shape (d,); the caller's array is not mutated.
    b : initial bias (scalar or shape-(1,) array).
    epoch : number of gradient-descent steps.
    eta : learning rate.
    p : per-sample weights, shape (m,) — the boosting distribution
        (possibly rescaled by the caller).
    dataset : optional array of shape (m, d+1) whose last column is the 0/1
        label. Defaults to the module-level `data`, preserving the original
        call signature.

    Returns
    -------
    h : final predictions sigmoid(X . w + b), shape (m,).
    weaklearner : dict holding the trained "w" and "b".
    """
    d = data if dataset is None else dataset
    X = d[:, 0:-1]
    yv = d[:, -1]
    for _ in range(epoch):
        # Hoisted: the original recomputed this sigmoid three times per step.
        s = sigmoid(numpy.dot(X, w) + b)
        # Gradient of the weighted squared loss w.r.t. the pre-activation.
        temp = 2 * p * (s - yv) * s * (1 - s)
        dw = numpy.dot(temp, X)
        db = numpy.sum(temp)
        w = w - eta * dw
        b = b - eta * db
    weaklearner = {"w": w, "b": b}
    h = sigmoid(numpy.dot(X, w) + b)
    return h, weaklearner
# Random initial parameters shared by every weak learner: each regression()
# call starts from this same (w, b) but sees a different sample distribution
# p, which is what makes the learners differ.
w=numpy.random.randn(2)
b=numpy.random.randn(1)
weaklearners_num=2000# number of weak learners in the ensemble
epoch=2000# gradient-descent steps used to train each weak learner
eta=0.1# weak-learner learning rate
#w,b=regression(w,b,1000,0.0001)
m,n=data[:,0:-1].shape
weight=numpy.ones((m))/m# initial boosting weights: uniform, one per sample
aggregate_predict=numpy.empty((weaklearners_num,m))
aggregate_beta=numpy.empty(weaklearners_num)
weak_learners=[]
for i in range(weaklearners_num):
    p=weight/numpy.sum(weight)# normalize weights into a distribution over samples
    # NOTE(review): p is multiplied by 1000 before training — presumably a
    # scaling factor so the weighted gradients don't vanish for large m
    # (see note 3 at the bottom of the file); confirm the chosen factor.
    h,weaklearner=regression(w, b,epoch,eta,p*1000)
    # Weighted error of this learner (real-valued h, AdaBoost-style).
    Error=numpy.dot(p,numpy.abs(h-data[:,-1]))
    aggregate_predict[i,:]=h# record each weak learner's predictions
    beta=Error/(1-Error)
    # Learner confidence. NOTE(review): log(1/beta) is +inf when Error == 0
    # and negative when Error > 0.5 — there is no guard here; verify Error
    # stays in (0, 0.5) in practice.
    aggregate_beta[i]=numpy.log(1/beta)
    # Reweight samples from beta and the predictions: samples predicted well
    # (|h - y| small) are down-weighted by beta — note the (1 - |h-y|)
    # exponent, which differs from the classic 0/1 update.
    weight=weight*beta**(1-numpy.abs(h-data[:,-1]))
    # Aggregate vote of the first i+1 learners: predict 1 when the
    # beta-weighted sum of predictions exceeds half of the total beta mass.
    res=numpy.where(numpy.sum(aggregate_predict[0:i+1,:]*aggregate_beta[0:i+1].reshape(-1,1),axis=0)>=(0.5*numpy.sum(aggregate_beta[0:i+1])),1,0)
    weak_learners.append(weaklearner)
    print(numpy.sum(res==data[:,-1])/m,Error)



# Notes: 1. each weak network must be trained sufficiently
#        2. increase the network's complexity
#        3. weight scaling factor