import torch
import torch.nn as nn
import torch.optim as optim
from args import get_args
from dataloader.dataload import getDataloader
from model.Unet_model import Unet_model,Csp_unet_model
from tqdm import tqdm
from model.utils import ConvBlock, UpSampling, DownSampling
import numpy as np
import logging
import torch.nn.functional as F
from loss.loss import dice_loss, focal_loss, MIOU, rec_criterion,MIOU_CAL
from show_result import show_graph, save_data
import os
from model.csp_module import C3, Conv, dark_module
# Print all log messages at this level and above.
logging.basicConfig(level=logging.INFO)


def main(args):
    """Train a segmentation model and save loss/mIoU curves and checkpoints.

    Expects ``args`` to provide: data_path, batch_size, num_workers, backbone,
    device, lr, LR_schedule, CEL, amp, epoches, save_path.

    Raises:
        ValueError: if ``args.backbone`` is not a known backbone name.
    """
    # Load the train/val splits from their respective txt index files.
    train_loader = getDataloader(root=args.data_path + '/data',
                                 txtfile_path=args.data_path + '/train.txt',
                                 shuffle=True,
                                 BatchSize=args.batch_size,
                                 num_workers=args.num_workers)
    val_loader = getDataloader(root=args.data_path + '/data',
                               txtfile_path=args.data_path + '/val.txt',
                               shuffle=False,
                               BatchSize=args.batch_size,
                               num_workers=args.num_workers)

    # Select the backbone. Fail fast on an unknown name instead of hitting a
    # NameError on `model` further down (the original code had no else branch).
    if args.backbone == 'CSP_Dark':
        model = Csp_unet_model(ConvBlock, C3, Conv, UpSampling,
                               base_channels=64, base_depth=1).to(args.device)
    elif args.backbone == 'unet_backbone':
        model = Unet_model(ConvBlock, DownSampling, UpSampling).to(args.device)
    else:
        raise ValueError(f"unknown backbone: {args.backbone!r}")

    optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=0.0005)
    if args.LR_schedule is not None:
        lr_schedule = optim.lr_scheduler.ExponentialLR(optimizer=optimizer, gamma=0.95)
    criterion = nn.CrossEntropyLoss() if args.CEL else rec_criterion

    # GradScaler is required for stable fp16 training under autocast; with
    # enabled=False it is a transparent no-op, so non-AMP runs are unchanged.
    scaler = torch.cuda.amp.GradScaler(enabled=args.amp)

    logging.info(f'''start to train......
                 epoches        :  {args.epoches}
                 batch_size     :  {args.batch_size}
                 learning_rate  :  {args.lr}
                 num_workers    :  {args.num_workers}
                 device         :  {args.device}
                 lr_scheduler   :  {args.LR_schedule}
                 CEL            :  {args.CEL}
                ''')

    # Create the checkpoint directory once, outside the epoch loop.
    ckpt_path = os.path.join(args.save_path, 'checkpoint')
    os.makedirs(ckpt_path, exist_ok=True)

    Train_Loss = []
    Val_Loss = []
    Train_Miou = []
    Val_Miou = []
    # Track the best validation mIoU across ALL epochs. The original code
    # reset this inside the epoch loop, which broke best-model tracking and
    # re-saved a checkpoint every epoch once the 0.97 threshold was passed.
    best_miou = 0.0
    for epoch in range(0, args.epoches):
        model.train()
        train_loss_list = []
        train_miou_list = []
        with tqdm(total=len(train_loader), mininterval=3)as pbar:
            for i, (img, mask) in enumerate(train_loader):
                optimizer.zero_grad()
                img = img.to(args.device)
                mask = mask.to(args.device)
                with torch.cuda.amp.autocast(enabled=args.amp):
                    output = model(img)
                    # CE (or reconstruction) loss plus multiclass Dice on the
                    # softmax probabilities vs. the one-hot mask (3 classes).
                    loss = criterion(output, mask) + dice_loss(F.softmax(output, dim=1).float(),
                                                               F.one_hot(mask, 3).permute(0, 3, 1, 2).float(),
                                                               multiclass=True)
                train_miou_cal = MIOU_CAL(3, torch.argmax(F.softmax(output, dim=1).float(), dim=1), mask.float())
                train_miou = train_miou_cal.miou
                # Scaled backward/step; degenerates to plain backward()/step()
                # when AMP is disabled.
                scaler.scale(loss).backward()
                scaler.step(optimizer)
                scaler.update()
                pbar.update()
                train_loss_list.append(loss.cpu().data.numpy())
                train_miou_list.append(train_miou.cpu().data.numpy())
            if args.LR_schedule is not None:
                lr_schedule.step()
            train_mean_loss = np.array(train_loss_list).mean()
            train_mean_miou = np.array(train_miou_list).mean()

        with torch.no_grad():
            model.eval()
            with tqdm(total=len(val_loader), mininterval=1)as pbar:
                val_loss_list = []
                val_miou_list = []
                for i, (img, mask) in enumerate(val_loader):
                    img = img.to(args.device)
                    mask = mask.to(args.device)
                    output = model(img)
                    loss = criterion(output, mask) + dice_loss(F.softmax(output, dim=1).float(),
                                                               F.one_hot(mask, 3).permute(0, 3, 1, 2).float(),
                                                               multiclass=True)
                    val_miou_cal = MIOU_CAL(3, torch.argmax(F.softmax(output, dim=1).float(), dim=1), mask.float())
                    val_miou = val_miou_cal.miou
                    val_loss_list.append(loss.cpu().data.numpy())
                    val_miou_list.append(val_miou.cpu().data.numpy())
                    pbar.update()

                val_mean_loss = np.array(val_loss_list).mean()
                val_mean_miou = np.array(val_miou_list).mean()

        logging.info(
            f"""epoch[{epoch + 1}/{args.epoches}] train_loss:{train_mean_loss:.4f} val_loss:{val_mean_loss:.4f} 
train_miou:{train_mean_miou:.4f} val_miou:{val_mean_miou:.4f}""")

        # Save a checkpoint only when the validation mIoU actually improves
        # AND clears the quality threshold.
        if val_mean_miou > best_miou:
            best_miou = val_mean_miou
            if best_miou > 0.97:
                torch.save(model.state_dict(), f"{ckpt_path}/best_miou_{best_miou:.4f}.pth")
        Train_Loss.append(train_mean_loss)
        Train_Miou.append(train_mean_miou)
        Val_Loss.append(val_mean_loss)
        Val_Miou.append(val_mean_miou)
    curve_path = os.path.join(args.save_path, 'curve')
    os.makedirs(curve_path, exist_ok=True)
    epoch_list = list(range(1, args.epoches + 1))
    # Plot and save the loss curve.
    show_graph(curve_path, epoch_list, Train_Loss, Val_Loss, "unet", "loss", "epoch", "loss",
               ["train_loss", "val_loss"])
    # Plot and save the mIoU curve.
    show_graph(curve_path, epoch_list, Train_Miou, Val_Miou, "unet", "miou", "epoch", "miou",
               ["train_miou", "val_miou"])
    # Dump the raw per-epoch numbers to a txt file.
    save_data(curve_path, Train_Loss, Val_Loss, Train_Miou, Val_Miou, "unet")


if __name__ == '__main__':
    # Parse CLI defaults, then apply the hand-picked overrides for this run.
    run_args = get_args()
    run_args.batch_size = 2
    run_args.epoches = 25
    run_args.CEL = False
    run_args.LR_schedule = "ExponentialLR"
    # Output directory encodes the experiment configuration in its name.
    out_dir = f"result/{run_args.epoches}epoches-{run_args.LR_schedule}_lrs-{run_args.CEL}_cel_CSPNET_0.2"
    run_args.save_path = out_dir
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    main(run_args)
