import numpy as np
from sklearn import datasets, linear_model
import matplotlib.pyplot as plt
from sklearn.neural_network import MLPClassifier


# Load the circles dataset: each CSV row is "x1,x2,label".
# np.loadtxt converts every field to float and skips blank lines, both of
# which the previous manual readlines/split parsing handled badly (a
# trailing empty line raised IndexError). ndmin=2 keeps the result 2-D
# even for a one-row file.
data = np.loadtxt("dataset_circles.csv", delimiter=",", ndmin=2)
X = data[:, :2]   # sample coordinates, shape (n_samples, 2)
y = data[:, 2]    # class label per sample (0 or 1 in this dataset)

print(X, y)




# Build one-hot network targets from the class labels.
t = np.zeros((X.shape[0], 2))
t[y == 0, 0] = 1   # label 0 -> row [1, 0]
t[y == 1, 1] = 1   # label 1 -> row [0, 1]
print(t)

# # plot data
# plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)
# plt.show()


################the NN model in jupyter notebook###########
# generate the NN model
class NN_Model:
    """Bare parameter container for the hand-written two-layer network.

    Hyper-parameters live as class attributes; weights (W1, b1, W2, b2),
    training data and layer activations (z1, z2) are attached to an
    instance by the surrounding script at runtime.
    """

    n_epoch = 1000    # default number of training iterations
    epsilon = 0.01    # gradient-descent learning rate
    
nn = NN_Model()

# Network topology.
nn.n_input_dim = X.shape[1]      # features per sample
nn.n_output_dim = 2              # one output node per class
nn.n_hide_dim = 4                # hidden-layer width

# Keep a reference to the training data on the model object.
nn.X = X
nn.y = y

# Scaled random weight initialisation (randn / sqrt(fan_in)); biases start
# at zero.  W1 is drawn before W2, preserving the RNG consumption order.
nn.W1 = np.random.randn(nn.n_input_dim, nn.n_hide_dim) / np.sqrt(nn.n_input_dim)
nn.b1 = np.zeros((1, nn.n_hide_dim))
nn.W2 = np.random.randn(nn.n_hide_dim, nn.n_output_dim) / np.sqrt(nn.n_hide_dim)
nn.b2 = np.zeros((1, nn.n_output_dim))

# define the sigmoid function and its derivative
def sigmod(X):
    """Numerically stable logistic sigmoid, 1 / (1 + exp(-X)), element-wise.

    The naive form np.exp(-X) overflows (RuntimeWarning) for large negative
    X; taking exp of -|X| keeps the argument non-positive so exp never
    overflows, while both branches are algebraically equal to the sigmoid.
    """
    z = np.exp(-np.abs(X))
    # X >= 0: 1/(1+e^-X);  X < 0: the equivalent e^X/(1+e^X).
    return np.where(X >= 0, 1.0 / (1.0 + z), z / (1.0 + z))

def sigmod_derivative(X):
    """Derivative of the logistic sigmoid at X: s * (1 - s) with s = sigmoid(X)."""
    s = 1.0 / (1.0 + np.exp(-X))   # sigmoid, inlined
    return s * (1.0 - s)

# network forward calculation
def forward(n, X):
    """Run one forward pass, caching activations on the model object.

    Stores the sigmoid-activated hidden layer in n.z1 and the network
    output in n.z2, then returns n for call-chaining convenience.
    """
    hidden_in = X.dot(n.W1) + n.b1
    n.z1 = sigmod(hidden_in)
    output_in = n.z1.dot(n.W2) + n.b2
    n.z2 = sigmod(output_in)
    return n


# use random weight to perdict
# forward(nn, X)
# y_pred = np.argmax(nn.z2, axis=1)  # with axis=1, returns each row's argmax index; with axis=0, each column's
# print(y_pred, nn.z2)
# # plot data
# plt.scatter(X[:, 0], X[:, 1], c=y_pred, cmap=plt.cm.Spectral)
# plt.show()


from sklearn.metrics import accuracy_score

# Ground-truth labels as a float array copy, used for accuracy reporting.
y_true = np.array(nn.y, dtype=float)

# back-propagation
def backpropagation(n, X, y):
    """Train the network in place with full-batch gradient descent.

    Parameters
    ----------
    n : NN_Model
        Model carrying W1/b1/W2/b2, epsilon and n_epoch.  Weights, biases
        and activations (z1, z2) are updated in place.
    X : ndarray, shape (n_samples, n_input_dim)
        Training inputs.
    y : ndarray, shape (n_samples, n_output_dim)
        One-hot target rows (column k is 1 for class k).
    """
    # Class labels recovered from the one-hot targets.  The old code read
    # the module globals `nn` and `y_true` here instead of its own
    # arguments, so it only worked when called on the global model.
    labels = np.argmax(y, axis=1)
    for i in range(n.n_epoch):
        # Forward pass fills n.z1 / n.z2.
        forward(n, X)

        # Sum-of-squares loss and training accuracy for progress logging.
        L = np.sum((n.z2 - y)**2)
        y_pred = np.argmax(n.z2, axis=1)        # bug fix: was nn.z2 (global)
        acc = np.mean(y_pred == labels)         # same value as accuracy_score

        print("epoch [%4d] L = %f, acc = %f" % (i, L, acc))

        # Output/hidden deltas: sigmoid'(a) = z*(1-z); the (y - z2) sign
        # makes the += updates below descend the squared-error loss.
        d2 = n.z2*(1-n.z2)*(y - n.z2)
        d1 = n.z1*(1-n.z1)*(np.dot(d2, n.W2.T))

        # Full-batch weight and bias updates.
        n.W2 += n.epsilon * np.dot(n.z1.T, d2)
        n.b2 += n.epsilon * np.sum(d2, axis=0)
        n.W1 += n.epsilon * np.dot(X.T, d1)
        n.b1 += n.epsilon * np.sum(d1, axis=0)

# Train for longer than the class default of 1000 epochs, using the
# one-hot targets t built above.
nn.n_epoch = 2000
backpropagation(nn, X, t)


# plot data
# Final predictions: index of the larger of the two output activations.
y_pred = np.argmax(nn.z2, axis=1)

# Two figures, colored by label: the true classes vs. the hand-written
# network's predictions.
plt.figure('ground truth')
plt.scatter(X[:, 0], X[:, 1], c=nn.y, cmap=plt.cm.Spectral)
plt.title("ground truth")

plt.figure('predicted of Mr.Bu')
plt.scatter(X[:, 0], X[:, 1], c=y_pred, cmap=plt.cm.Spectral)
plt.title("predicted")
# plt.show()  # deferred: a single plt.show() at the end shows all figures



#############using sklearn.neural_network.MLPClassifier############
# Baseline: train sklearn's own MLP on the same data and compare accuracy
# against the hand-written network's predictions.
model = MLPClassifier()
model.fit(X, y)
y_pred_MLP = model.predict(X)

print('accuracy of Mr.Bu:    ', accuracy_score(y_true, y_pred))
print('accuracy of sklearn:', accuracy_score(y_true, y_pred_MLP))

plt.figure('predicted of sklearn')
plt.scatter(X[:, 0], X[:, 1], c=y_pred_MLP, cmap=plt.cm.Spectral)
plt.title("predicted of sklearn")

plt.show()

