from CNN_NET import *
from MyDataSet import MyDataSet
from MyDataSet import EmptyDataSet

import os
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import random_split
import matplotlib.pyplot as plt
from datetime import date
from datetime import datetime
import pandas as pd

# Fraction of each label's samples assigned to the TRAINING split; the rest
# go to the test split. NOTE(review): despite the name "test_rate", this is
# used everywhere as the train ratio — confirm and consider renaming. The
# original note also asked that dataset sizes be based on multiples of 10.
test_rate = 0.7

def kind_to_index(kind_array, KindIndexMap):
    """Map each class label to its integer output-node index.

    Args:
        kind_array: iterable of class labels (e.g. label strings from a batch).
        KindIndexMap: dict mapping label -> integer class index.

    Returns:
        1-D ``torch.Tensor`` of indices, usable as ``F.nll_loss`` targets.

    Raises:
        KeyError: if a label in ``kind_array`` is missing from ``KindIndexMap``.
    """
    # A comprehension replaces the manual append loop; same result.
    return torch.tensor([KindIndexMap[kind] for kind in kind_array])

# Build the training and test data loaders from the on-disk dataset tree.
def get_dataloader(rroot_path=r"Data"):
    """Scan ``rroot_path`` and build train/test loaders plus class mappings.

    Each sub-directory (named by date, e.g. "2024.3.6") holds one xlsx file
    per class label; every label's samples are randomly split into train/test
    by the module-level ``test_rate``.

    Args:
        rroot_path: root directory of the dataset (default ``"Data"``).

    Returns:
        Tuple ``(TrainLoader, TestLoader, net_map, KindIndexMap)`` where
        ``net_map``/``KindIndexMap`` come from ``creat_net_map`` and map
        network output nodes <-> class labels.
    """
    # 0. Accumulators: all seen labels and the growing concatenated datasets.
    label_list = []
    TrainDataSet = EmptyDataSet()
    TestDataSet = EmptyDataSet()

    # 1. Walk every date directory, skipping non-data folders.
    for root_name in os.listdir(rroot_path):
        if root_name in ("Picture", "原始数据备份"):  # plots / raw-data backup dirs
            continue
        root_path = os.path.join(rroot_path, root_name)
        for label in os.listdir(root_path):
            if label == "background.xlsx":  # background reference, not a class
                continue
            label_list.append(label)
            TempDataSet = MyDataSet(root_path, label)
            set_len = len(TempDataSet)  # len(), not the dunder call
            train_len = int(set_len * test_rate)
            # Randomly split this label's samples into train/test partitions.
            TempTrain, TempTest = random_split(TempDataSet,
                                               [train_len, set_len - train_len])
            TrainDataSet += TempTrain
            TestDataSet += TempTest

    # 2. Build the output-node mapping and wrap the datasets in loaders.
    net_map, KindIndexMap = creat_net_map(label_list)
    TrainLoader = get_data_loader(TrainDataSet)  # !!! known issue when batch size > 1
    TestLoader = get_data_loader(TestDataSet)
    return TrainLoader, TestLoader, net_map, KindIndexMap

"""
训练的主函数
"""
def net_main(ecount):
    #0. 初始化变量
    learn_rate = 0.001 #学习率
    epoch_count = ecount #训练轮次
    test_rate = 0.7 # 训练集占所有数据集的百分比，要求数据集是以10为起准
    acc_list = [] #训练准确率列表
    loss_list = [] #训练损失列表
    sample_count = 101 #总数目
    train_count = int(sample_count*test_rate)
    test_count = sample_count - train_count

    # 1. 获取所有菌种类别和映射图
    TrainLoader,TestLoader,net_map,KindIndexMap = get_dataloader()

    #2. 创建网络
    net = MyNet(len(net_map), 51,47) # 训练网络

    ##3. 开始训练
    train_loss = 0
    for (x, y) in TrainLoader:
        output = net.forward(x)  # 卷积需要输入二维矩阵
        # 创建真值矩阵(需要将y中的每个品种转换为数字)
        real = kind_to_index(y, KindIndexMap)
        loss = F.nll_loss(output, real)
        train_loss = train_loss + loss

    train_loss = train_loss/train_count
    acc= evaluate(TrainLoader, net, net_map)
    acc_list.append(acc)
    loss_list.append(float(train_loss))
    print("Inital accuracy:", acc, "Inital loss: ", train_loss)
    optimizer = torch.optim.Adam(net.parameters(),lr = learn_rate)

    for epoch in range(epoch_count):
        total_loss = 0
        for (x,y) in TrainLoader:
            net.zero_grad() #初始化
            output = net.forward(x) #卷积需要输入二维矩阵
            #创建真值矩阵(需要将y中的每个品种转换为数字)
            real = kind_to_index(y,KindIndexMap)
            loss = F.nll_loss(output,real)
            loss.backward()
            optimizer.step()
            total_loss = total_loss + loss
        acc = evaluate(TrainLoader, net, net_map)
        acc_list.append(acc)
        loss_list.append(float(total_loss/train_count))
        print("epoch:",epoch+1," accuracy:",acc,"loss:",total_loss/train_count)
        print("----------------------------------------------------------------------")

    # 4. 验证训练结果
    print("---------------------------------------")
    print("------ Start verifying test set -------")
    test_loss = 0
    for (x, y) in TestLoader:
        output = net.forward(x)  # 卷积需要输入二维矩阵
        # 创建真值矩阵(需要将y中的每个品种转换为数字)
        real = kind_to_index(y, KindIndexMap)
        loss = F.nll_loss(output, real)
        test_loss = test_loss + loss
    test_loss = test_loss/test_count
    test_acc = evaluate(TestLoader, net, net_map)
    print("Final loss:",test_loss)
    test_grades = get_grades(TestLoader,net,KindIndexMap)
    grades_list = test_grades.get_average()
    # 获取当前时间
    time_now = datetime.now().strftime("%Y-%m-%d_%H.%M.%S") #保存文件的名字
    file_name = "E"+str(epoch_count)+"_"+time_now
    #保存混淆矩阵
    test_grades.save_conf_mar(file_name)

    #6. 绘制ROC曲线
    draw_ROC(TestLoader,net,KindIndexMap,net_map,file_name)

    # 7. 绘制准确率图像和损失图像
    x = range(len(acc_list))
    plt.figure(figsize=(12.8,7.2))
    plt.plot(x,acc_list,"o-",color="green",linewidth=2)
    plt.xticks(fontsize=20)  # x轴刻度标签字体大小
    plt.yticks(fontsize=20)  # y轴刻度标签字体大小
    plt.xlabel('Epoch', fontdict={"family": "Times New Roman"}, fontsize=32)
    plt.ylabel('Accuracy', fontdict={"family": "Times New Roman"}, fontsize=32)
    plt.title("Model Accuracy",fontdict={"family": "Times New Roman"},fontsize=36)
    plt.savefig("RunningData\\AccLossPic\\Acc_"+str(epoch_count)+"_"+str(time_now)+".png")
    plt.figure(figsize=(12.8,7.2))
    plt.plot(x,loss_list,"o-",color="red",linewidth=2)
    plt.xticks(fontsize=20)  # x轴刻度标签字体大小
    plt.yticks(fontsize=20)  # y轴刻度标签字体大小
    plt.xlabel('Epoch', fontdict={"family": "Times New Roman"}, fontsize=32)
    plt.ylabel('Loss', fontdict={"family": "Times New Roman"}, fontsize=32)
    plt.title("Model Loss",fontdict={"family": "Times New Roman"},fontsize=36)
    plt.savefig("RunningData\\AccLossPic\\Loss_"+str(epoch_count)+"_"+str(time_now)+".png")

    # 8.保存训练的模型
    torch.save(net,r"Model\\"+file_name+".pth")
    return {"Running Time":time_now,"Epoch Count":epoch_count,"Last Train Accuracy":acc_list[-1],"Last Train Mean Loss":loss_list[-1],"Test Accuracy":float(grades_list[0]),"Test Mean Loss":float(test_loss),"Test Precison":grades_list[1],"Test Recall":grades_list[2],"Test F1 Grades":grades_list[3]}

# Run-log object: records each training run's results into an Excel file.
class RunLogType:
    """Append-only run log backed by an Excel sheet."""

    def __init__(self, path=r"D:\Code\CNN识别菌种\RunningData\RunData.xlsx"):
        """Load the existing log from ``path`` into a DataFrame."""
        self.log = pd.read_excel(path)

    def _count(self):
        """Return the number of logged runs (rows) currently in the log."""
        return self.log.shape[0]

    def _append(self, data_dir):
        """Append one result row (dict from ``net_main``) and persist.

        Assigns a 1-based sequential ID to the new row.
        """
        data_dir["ID"] = self._count() + 1
        # DataFrame._append is a private API removed in pandas 2.x;
        # pd.concat with a one-row frame is the supported equivalent.
        self.log = pd.concat([self.log, pd.DataFrame([data_dir])],
                             ignore_index=True)
        self._save()

    def _save(self, save_path=r"D:\Code\CNN识别菌种\RunningData\RunData.xlsx"):
        """Write the whole log back to Excel (no index column)."""
        self.log.to_excel(save_path, index=False)

def main():
    """Run the full training pipeline for each configured epoch count and
    record every run's summary in the Excel run log."""
    run_log = RunLogType()
    epoch_settings = [20]   # epoch counts to try
    repeats = 1             # runs per setting
    for epoch_total in epoch_settings:
        for _ in range(repeats):
            run_log._append(net_main(epoch_total))

# main()

"""
目前代码存在的问题：
1.关于数据集方面：
a. 数据没有归一化，有的时候荧光值可能大(但是浓度控制在0.2，应该还不错)，可能需要归一化
b. 数据集太少，20个不够看

2.代码方面，目前存在BUG
a. 如果我输入的图换个矩阵格式，代码是否还能够看?
b. 批量为1代码可以正常运行，但是批量为1以上的时候出现BUG

2024年4月4日16:15:54
测试效果：完全ok，可能数据集不够，还需要收集更多的数据`
"""