# Imports
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Suppress warnings (e.g. overflow in np.exp for large |z| during training)
import warnings
warnings.filterwarnings('ignore')
# Matplotlib config: SimHei font so Chinese labels render, and keep the
# minus sign displayable with a non-ASCII font
plt.rcParams['font.sans-serif']=['SimHei']
plt.rcParams['axes.unicode_minus']=False

# Load data. Assumes each CSV stores samples column-wise with no header:
# rows 0-1 are the two features, the last row is the 0/1 label — TODO confirm
# against the actual files.
train = pd.read_csv('./田字型散点/train.csv',header=None)
test = pd.read_csv('./田字型散点/test.csv',header=None)

# Transpose so samples become rows: features in x_*, labels in y_*
x_train=train.iloc[0:2,:].T
y_train=train.iloc[-1,:].T
x_test=test.iloc[0:2,:].T
y_test=test.iloc[-1,:].T

X = x_train
# Prepend a constant column of ones as the bias "neuron".
# NOTE: DataFrame.insert mutates in place, so x_train gains this column too.
X.insert(0,'ones',1)
# Reshape labels into (n, 1) column vectors for the matrix maths below
y_test = y_test.values.reshape((len(y_test),1))
y_train = y_train.values.reshape((len(y_train),1))

# Network dimensions
inputSize = 3    # 2 features + 1 bias column
hiddenSize = 8
outputSize = 1

# Random init in [0, 1); w2 has one extra column for the hidden-layer bias
w1 = np.random.rand(hiddenSize,inputSize)
w2 = np.random.rand(outputSize,hiddenSize+1)

'''Forward propagation'''
# Activation function
def sigmoid(z):
    """Logistic (sigmoid) activation: squashes z into the open interval (0, 1)."""
    return 1.0 / (1.0 + np.exp(-z))

# Forward-pass function
def forward(X,w1,w2):
    """One forward pass: input -> hidden (sigmoid) -> output (sigmoid).

    Returns (a1, z2, a2, z3, a3): the input activation, hidden
    pre-activation, hidden activation with a prepended bias column,
    output pre-activation, and the network output.
    """
    a1 = X
    hidden_pre = np.dot(a1, w1.T)                # hidden-layer pre-activation
    hidden_act = sigmoid(hidden_pre)
    hidden_biased = np.insert(hidden_act, 0, 1, axis=1)  # add bias unit
    out_pre = np.dot(hidden_biased, w2.T)
    out_act = sigmoid(out_pre)
    return a1, hidden_pre, hidden_biased, out_pre, out_act

def Jx(X,y,w1,w2):
    """Binary cross-entropy cost of the network averaged over the samples in X."""
    h = forward(X, w1, w2)[-1]  # predicted probabilities
    log_likelihood = y * np.log(h) + (1 - y) * np.log(1 - h)
    return -np.sum(log_likelihood) / len(X)

'''Backpropagation'''
def sigmoid_gradient(z):
    """Derivative of the sigmoid evaluated at pre-activation z: s * (1 - s)."""
    s = 1.0 / (1.0 + np.exp(-z))  # sigmoid, inlined so it is computed once
    return s * (1.0 - s)

# Backward-pass function (gradients via backpropagation)
def backward(X, y, w1, w2):
    """Average gradients of the cross-entropy cost w.r.t. w1 and w2.

    Returns (D1, D2), matching the shapes of w1 and w2 respectively.
    """
    a1, z2, a2, _, h = forward(X, w1, w2)
    # For sigmoid output + cross-entropy the output error is simply h - y.
    output_err = h - y
    # Propagate through w2, skipping its bias column (the bias unit has no
    # incoming error), and scale by the hidden layer's activation gradient.
    hidden_err = output_err.dot(w2[:, 1:]) * sigmoid_gradient(z2)

    grad_w2 = output_err.T.dot(a2) / len(X)
    grad_w1 = hidden_err.T.dot(a1) / len(X)
    return grad_w1, grad_w2

# One-off gradient evaluation (not used further; train() below recomputes
# gradients each epoch)
D1, D2 = backward(X, y_train, w1, w2)

'''训练神经网络'''
# Mean absolute prediction error per epoch, appended to by train()
h_error = []
def train(X,y,w1,w2,alpha=0.5,epochs=10001):
    """Train the network with full-batch gradient descent.

    Args:
        X: design matrix including the bias column.
        y: (n, 1) array of 0/1 labels.
        w1, w2: initial weight matrices.
        alpha: learning rate.
        epochs: number of full-batch updates.

    Side effect: appends each epoch's mean absolute error to the
    module-level list ``h_error``.

    Returns:
        The trained (w1, w2).
    """
    for epoch in range(epochs):
        _,_,_,_,h = forward(X,w1,w2)
        error = h-y
        h_error.append(np.abs(error).mean())
        # BUG FIX: the original called backward with the global ``y_train``
        # instead of the ``y`` parameter, silently ignoring the argument.
        D1,D2 = backward(X,y,w1,w2)
        w1 = w1-alpha*D1
        w2 = w2-alpha*D2
    return w1,w2

W1,W2=train(X,y_train,w1,w2)

# Plot the training error curve.
# NOTE(review): the title says "Mean Sum Squared Loss" but h_error actually
# holds the mean ABSOLUTE error per epoch — consider renaming one of the two.
plt.plot(range(len(h_error)),h_error)
plt.title('Mean Sum Squared Loss')
plt.xlabel('Epoch')
plt.ylabel('Loss')
#plt.show()

'''Prediction / testing'''
def predict(x_test,W1,W2):
    """Return the network's raw output probabilities for x_test."""
    outputs = forward(x_test, W1, W2)
    return outputs[-1]

# Prepend the bias column to the test features (in place, mirroring X above)
x_test.insert(0,'ones',1)
y_pre = predict(x_test,W1,W2)
# Threshold the output probabilities at 0.5 to obtain hard 0/1 labels
for i in range(len(y_pre)):
    if y_pre[i] > 0.5:
        y_pre[i] = 1
    else:
        y_pre[i] =0

'''Evaluation'''
# Confusion matrix
# Tally the confusion matrix by comparing thresholded predictions to labels
TP, FN, FP, TN = 0, 0, 0, 0
for i in range(len(y_pre)):
    if y_pre[i] == 1 and y_test[i] == 1:
        TP += 1  # true positive
    elif y_pre[i] == 0 and y_test[i] == 1:
        FN += 1  # false negative (missed positive)
    elif y_pre[i] == 1 and y_test[i] == 0:
        FP += 1  # false positive
    elif y_pre[i] == 0 and y_test[i] == 0:
        TN += 1  # true negative

print("TP：", TP)
print("FN：", FN)
print("FP：", FP)
print("TN：", TN)


def assess(TP, FN, FP, TN):
    """Print Accuracy / Precision / Recall / F1 from confusion-matrix counts.

    Each ratio is guarded against a zero denominator (e.g. when the model
    predicts no positives at all), reporting 0.0 instead of raising
    ZeroDivisionError as the original did.
    """
    total = TP + FP + TN + FN
    # Accuracy: fraction of all predictions that are correct
    Accuracy = (TP + TN) / total if total else 0.0
    # Precision: fraction of predicted positives that are truly positive
    Precision = TP / (TP + FP) if (TP + FP) else 0.0
    # Recall: fraction of actual positives that were found
    Recall = TP / (TP + FN) if (TP + FN) else 0.0
    # F1: harmonic mean of precision and recall
    F1 = (2 * Precision * Recall) / (Precision + Recall) if (Precision + Recall) else 0.0

    print("Accuracy：", Accuracy)
    print("Precision：", Precision)
    print("Recall：", Recall)
    print("F1：", F1)
    return

assess(TP, FN, FP, TN)