# -*- coding: utf-8 -*-
"""
Created on Thu Aug 29 15:40:49 2019

@author: zhangjuefei
"""

from sklearn.metrics import accuracy_score, roc_auc_score, classification_report, roc_curve, precision_recall_curve
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
import pandas as pd
import numpy as np
import os

from layer import *
from node import *
from optimizer import *

seq_len = 30  # sequence length (number of RNN time steps)
dimension = 20  # dimensionality of the input vector at each time step
hidden_dim = 40  # output dimensionality of the RNN hidden unit

# Load the two-class phone-tap signal dataset.
# NOTE(review): path is hard-coded to a local Windows directory — adjust per machine.
ROOT = "D:/documents/360_work/拍击手机"
# assumes data.csv has 601 columns: 600 signal values (cols 0-599) plus the class label in col 600 — TODO confirm
data = pd.read_csv(os.path.join(ROOT, "data.csv"), header=None, engine="python")
train_x, test_x, train_y, test_y = train_test_split(data.loc[:,:599], data[600])
# One-hot encode the integer class labels; dense output so rows can be used as matrices directly.
oh = OneHotEncoder(sparse=False, categories='auto')
train_y = oh.fit_transform(train_y.values.reshape(-1, 1))
test_y = oh.transform(test_y.values.reshape(-1, 1))
train_x = train_x.values
test_x = test_x.values

# Time-series inputs: one non-trainable (dimension x 1) Variable node per time
# step, collected in input_vectors (created in step order, step 0 first).
input_vectors = [
    Variable(dim=(dimension, 1), init=False, trainable=False)
    for _ in range(seq_len)
]
    
# Weight matrix applied to the current step's input
W = Variable(dim=(hidden_dim, dimension), init=True, trainable=True)

# Weight matrix applied to the previous step's output (recurrent weights)
Y = Variable(dim=(hidden_dim, hidden_dim), init=True, trainable=True)

# Bias vector
b = Variable(dim=(hidden_dim, 1), init=True, trainable=True)

# Unroll the RNN: at each step, combine the affine transform of the current
# input with the recurrent transform of the previous step's output, then
# apply a ReLU nonlinearity.
last_step = None  # previous step's output; None on the very first step
for iv in input_vectors:
    affine = Add(MatMul(W, iv), b)
    if last_step is not None:
        affine = Add(MatMul(Y, last_step), affine)
    last_step = ReLU(affine)
y = last_step  # final hidden state, consumed by the classifier head below


fc1 = fc(y, hidden_dim, 16, "ReLU")  # first fully-connected layer
fc2 = fc(fc1, 16, 2, "None")  # second fully-connected layer (raw logits, no activation)

# Class probabilities (softmax over the 2 logits)
prob = SoftMax(fc2)

# Training label: one-hot vector for the 2 classes
label = Variable((2, 1), trainable=False)

# Cross-entropy loss built on the logits; softmax is applied inside the node,
# so `prob` above is only used for evaluation, not for the loss.
loss = CrossEntropyWithSoftMax(fc2, label)

# Adam optimizer over the whole graph, learning rate 1e-4, mini-batch of 8
optimizer = Adam(default_graph, loss, 0.0001, batch_size=8)

# Training loop: iterate over epochs and training instances, evaluating on the
# full test set every 500 instances; stop early once accuracy and AUC both
# exceed 0.9.
print("start training", flush=True)

finish = False
for e in range(1000):

    for i in range(len(train_x)):
        # View each 600-value row as a (dimension x seq_len) matrix so that
        # column j is the input vector for time step j.
        # NOTE(review): assumes the flat row is laid out dimension-major — confirm against the data format.
        x = np.mat(train_x[i, :]).reshape(dimension, seq_len)
        for j in range(seq_len):
            input_vectors[j].set_value(x[:, j])
        label.set_value(np.mat(train_y[i, :]).T)

        # One optimizer step (the optimizer accumulates gradients per batch_size)
        optimizer.one_step()

        # Periodic evaluation on the test set.
        # (i + 1) % 500 == 0 already implies i >= 499, so no extra guard is needed.
        if (i + 1) % 500 == 0:

            probs = []
            losses = []
            for j in range(len(test_x)):
                x = np.mat(test_x[j, :]).reshape(dimension, seq_len)
                for k in range(seq_len):
                    input_vectors[k].set_value(x[:, k])
                label.set_value(np.mat(test_y[j, :]).T)

                # Forward pass: class probabilities
                prob.forward()
                probs.append(prob.value.A1)

                # Forward pass: loss value
                loss.forward()
                losses.append(loss.value[0, 0])

            # Predicted class = index of the largest probability
            probs = np.array(probs)
            pred = np.argmax(probs, axis=1)
            truth = np.argmax(test_y, axis=1)
            accuracy = accuracy_score(truth, pred)
            auc = roc_auc_score(truth, probs[:, 1])

            print("epoch: {:d}, iter: {:d}, loss: {:.3f}, accuracy: {:.2f}%, auc: {:.2f}".format(e + 1, i + 1, np.mean(losses),
                                                                                    accuracy * 100, auc), flush=True)

            # Early stopping once the model is good enough
            if auc > 0.9 and accuracy > 0.9:
                finish = True
                break

    if finish:
        break


### Save model parameters: dump each trainable Variable's value to a binary file.
# NOTE(review): relies on the graph registering nodes in creation order
# (W, Y, b, then the two fc layers' weight/bias pairs) — confirm in the
# graph implementation before trusting the filename-to-node mapping.
nodes = [node for node in default_graph.nodes if isinstance(node, Variable) and node.trainable]
param_files = ["W.bin", "Y.bin", "b.bin", "W1.bin", "b1.bin", "W2.bin", "b2.bin"]
for node, filename in zip(nodes, param_files):
    node.value.tofile(filename)
    
            
# Final evaluation using the last test-set predictions computed inside the
# training loop.
# NOTE(review): `truth`, `pred`, `probs` only exist if the in-training
# evaluation ran at least once (i.e. an epoch saw >= 500 instances).
print(classification_report(truth, pred))
# ROC curve: false positive rate vs. true positive rate
fpr, tpr, th = roc_curve(truth, probs[:,1])
pd.DataFrame({"false positive rate":fpr, "true positive rate":tpr}).plot(x="false positive rate", y="true positive rate", figsize=(6, 6), grid=True)

# Precision/recall vs. decision threshold (last precision/recall point has no
# threshold, hence the [:-1] slices)
p, r, t = precision_recall_curve(truth, probs[:,1])
pd.DataFrame({"precision":p[:-1], "recall":r[:-1], "threshold":t}).plot(x="threshold", y=["precision", "recall"], figsize=(6, 6), grid=True)
    


# Visualize 9 randomly chosen test instances; the title shows the instance
# index and its true/predicted class.
import matplotlib.pyplot as plt
fig = plt.figure(figsize=(12, 12))
for i in range(9):
    ax = fig.add_subplot(331 + i)  # 3x3 grid, subplot i+1
    rand = np.random.randint(len(test_x))
    # Each instance is reshaped to (20, 30) = (dimension, seq_len) and
    # transposed so time steps run along the x-axis.
    # assumes class 1 means "UNSAVE" and class 0 means "SAVE" — TODO confirm label semantics
    pd.DataFrame(test_x[rand].reshape(20, 30).T).plot(
            grid=True, 
            title="{:d}: {:s}/{:s}".format(rand, 
                   "UNSAVE" if truth[rand] else "SAVE", 
                   "UNSAVE" if pred[rand] else "SAVE"), 
            ax = ax,
            legend=False)
    