import csv
import os
import time

import numpy as np
import torch
import torch.nn.functional as F
from models.loss import dice_loss, focal_loss, sad_loss, binary_dice_loss, binary_focal_loss
from scipy.ndimage.interpolation import zoom
from torch.cuda import empty_cache
from tqdm import tqdm

from utils.ShangJiao_Utils import save_itk, dice_coef_np, ppv_np, \
    sensitivity_np, acc_np, combine_total_avg, combine_total, \
    normalize_min_max

th_bin = 0.5


def get_lr(epoch, args):
    """
    Compute the learning rate for the given epoch.

    :param epoch: current epoch number
    :param args: global arguments; uses args.lr (a fixed rate, or None to
        enable the staged schedule), args.lr_stage (epoch breakpoints) and
        args.lr_preset (per-stage learning rates)
    :return: learning rate of the next epoch
    """
    if args.lr is not None:
        # A fixed learning rate was requested on the command line.
        return args.lr
    # Piecewise-constant schedule: count how many stage boundaries the
    # current epoch has already passed and index the preset table with it.
    assert epoch <= args.lr_stage[-1]
    stage_idx = np.sum(epoch > args.lr_stage)
    return args.lr_preset[stage_idx]


def train_casenet(epoch, model, data_loader, optimizer, args, save_dir):
    """
    Run one training epoch of the joint binary-vessel / artery-vein network
    and report aggregate segmentation metrics.

    :param epoch: current epoch number (drives the lr schedule; the
        multi-class losses are only added to the objective once epoch > 10)
    :param model: CNN model; called as model(x, coord) and expected to return
        (casePreds, vesselbw, attentions)
    :param data_loader: training data, yielding image/label cubes plus
        per-cube metadata (origin, spacing, split ids, shapes)
    :param optimizer: training optimizer
    :param args: global arguments args
    :param save_dir: save directory (a 'train' subdirectory is created in it)
    :return: (mean_loss, mean_acc, mean_sensiti, mean_dice, mean_ppv), where
        the metric entries are dicts keyed by category (1=artery, 2=vein).
        NOTE(review): mean_loss is the *sum* over batches, not a mean.
    """
    model.train()
    # NOTE(review): sidelen/margin are computed but never used in this
    # function (cube stitching only happens in the val/test path).
    sidelen = args.stridet
    margin = args.cubesize
    starttime = time.time()
    # Apply the scheduled learning rate to every parameter group.
    lr = get_lr(epoch, args)
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    assert (lr is not None)
    optimizer.zero_grad()

    lossHist = []  # per-batch loss values
    lenHist = []   # per-batch sizes

    # Per-category metric accumulators, keyed by class id (1=artery, 2=vein).
    dice_total = {}
    ppv_total = {}
    acc_total = {}
    dice_hard_total = {}
    sensitivity_total = {}

    # Binary whole-vessel segmentation metric accumulators.
    dice_total_bw = []
    ppv_total_bw = []
    acc_total_bw = []
    dice_hard_total_bw = []
    sensitivity_total_bw = []

    traindir = os.path.join(save_dir, 'train')
    if not os.path.exists(traindir):
        os.mkdir(traindir)
    # NOTE(review): this log path is defined but never written to here.
    training_log = os.path.join(traindir, 'train_log.txt')
    #################
    for i, (x, y, coord, ybw, ymask, airway, org, spac, NameID, SplitID, nzhw, ShapeOrg) in enumerate(
            tqdm(data_loader)):
        ######Wrap Tensor##########
        NameID = NameID[0]
        SplitID = SplitID[0]
        batchlen = x.size(0)
        x = x.cuda()
        y = y.cuda()
        ###############################
        coord = coord.cuda()
        ybw = ybw.cuda()
        # Keep a CPU copy of the mask for the metric computation below.
        maskdata = ymask.numpy()
        ymask = ymask.cuda()
        ymask.requires_grad = False

        # Forward pass: multi-class prediction(s), binary vessel map, and
        # intermediate attention maps used by the SAD distillation losses.
        casePreds, vesselbw, attentions = model(x, coord)

        # Binary vessel segmentation loss: dice + focal.
        loss = binary_dice_loss(vesselbw, ybw)

        # loss += binary_cross_entropy(vesselbw, ybw)
        loss += binary_focal_loss(vesselbw, ybw)

        if args.deepsupervision:
            # Deep supervision: main head plus three auxiliary heads, each
            # penalized with masked dice and focal losses.
            casePred, ds6, ds7, ds8 = casePreds[0], casePreds[1], casePreds[2], casePreds[3]

            dceloss = dice_loss(casePred, y, mask=ymask) + \
                      dice_loss(ds6, y, mask=ymask) + \
                      dice_loss(ds7, y, mask=ymask) + \
                      dice_loss(ds8, y, mask=ymask)

            # celoss = cross_entropy(casePred, y, mask=ymask) + \
            # cross_entropy(ds6, y, mask=ymask) + \
            # cross_entropy(ds7, y, mask=ymask) + \
            # cross_entropy(ds8, y, mask=ymask)

            fcloss = focal_loss(casePred, y, mask=ymask) + \
                     focal_loss(ds6, y, mask=ymask) + \
                     focal_loss(ds7, y, mask=ymask) + \
                     focal_loss(ds8, y, mask=ymask)

        else:
            casePred = casePreds

            dceloss = dice_loss(casePred, y, mask=ymask)

            # celoss = cross_entropy(casePred, y, mask=ymask)

            fcloss = focal_loss(casePred, y, mask=ymask)

        # Warm-up: for the first 10 epochs only the binary vessel loss is
        # optimized; afterwards it is down-weighted and the multi-class
        # artery/vein losses are added to the objective.
        if epoch > 10:
            loss *= 0.1  # scale ratio of the binary vessel segmentation loss

            if dceloss is not None:
                loss += dceloss
            # if celoss is not None:
            # 	loss += celoss
            if fcloss is not None:
                loss += fcloss

        if args.sadencoder == 1:
            # attentions 0, 1, 2: distill each encoder attention map towards
            # the next deeper one.
            gamma_sad = [0.1, 0.1, 0.1]
            for iter_sad in range(2):
                loss += (gamma_sad[iter_sad]) * sad_loss(attentions[iter_sad], attentions[iter_sad + 1],
                                                         encoder_flag=True)

        if args.saddecoder == 1:
            # attentions 3, 4, 5, 6: same scheme for the decoder side.
            gamma_sad = [0.2, 0.3, 0.4]
            for iter_sad in range(3, 6):
                loss += (gamma_sad[iter_sad - 3]) * sad_loss(attentions[iter_sad], attentions[iter_sad + 1],
                                                             encoder_flag=False)

        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # for evaluation
        casePred = F.softmax(casePred, dim=1)
        lossHist.append(loss.item())
        lenHist.append(batchlen)

        #####################segmentation calculating metrics#######################
        outdata = casePred.cpu().data.numpy()
        segdata = y.cpu().data.numpy()
        outdatabw = np.argmax(outdata, axis=1)  # argmax of multi-category classification
        vesseldata = vesselbw.cpu().data.numpy()
        vesseldatabw = (vesseldata > th_bin)  # threshold soft map to a binary mask
        ybwdata = ybw.cpu().data.numpy()

        for j in range(batchlen):
            # multi-category artery/vein classification
            for curcategory in range(1, 3):
                # Hard prediction mask for this category, restricted to the lung mask.
                segpred = (outdatabw[j] == curcategory)
                segpred = segpred.astype('float')
                segpred = segpred * maskdata[j, 0]
                # Soft (probability) prediction for the "soft" dice metric.
                segpredorg = outdata[j, curcategory] * maskdata[j, 0]
                curgt = (segdata == curcategory)
                dice = dice_coef_np(segpredorg, curgt[j, 0])
                ppv = ppv_np(segpred, curgt[j, 0])
                sensiti = sensitivity_np(segpred, curgt[j, 0])
                acc = acc_np(segpred, curgt[j, 0])
                dicehard = dice_coef_np(segpred, curgt[j, 0])
                ##########################################################################
                if not (curcategory in dice_total):
                    dice_total[curcategory] = []
                dice_total[curcategory].append(dice)
                if not (curcategory in ppv_total):
                    ppv_total[curcategory] = []
                ppv_total[curcategory].append(ppv)
                if not (curcategory in sensitivity_total):
                    sensitivity_total[curcategory] = []
                sensitivity_total[curcategory].append(sensiti)
                if not (curcategory in acc_total):
                    acc_total[curcategory] = []
                acc_total[curcategory].append(acc)
                if not (curcategory in dice_hard_total):
                    dice_hard_total[curcategory] = []
                dice_hard_total[curcategory].append(dicehard)
            # binary category vessel segmentation
            # (soft map for "dice", thresholded map for the other metrics)
            dice = dice_coef_np(vesseldata[j, 0], ybwdata[j, 0])
            ppv = ppv_np(vesseldatabw[j, 0], ybwdata[j, 0])
            sensiti = sensitivity_np(vesseldatabw[j, 0], ybwdata[j, 0])
            acc = acc_np(vesseldatabw[j, 0], ybwdata[j, 0])
            dicehard = dice_coef_np(vesseldatabw[j, 0], ybwdata[j, 0])
            ##########################################################################
            dice_total_bw.append(dice)
            ppv_total_bw.append(ppv)
            sensitivity_total_bw.append(sensiti)
            acc_total_bw.append(acc)
            dice_hard_total_bw.append(dicehard)
    ##################################################################################

    endtime = time.time()
    lenHist = np.array(lenHist)
    lossHist = np.array(lossHist)
    # NOTE(review): named "mean" but this is the summed loss over all batches.
    mean_loss = np.sum(lossHist)

    # Aggregate binary-vessel metrics over the whole epoch.
    mean_dice_hard_bw = np.mean(np.array(dice_hard_total_bw))
    mean_sensiti_bw = np.mean(np.array(sensitivity_total_bw))
    mean_ppv_bw = np.mean(np.array(ppv_total_bw))
    mean_acc_bw = np.mean(np.array(acc_total_bw))
    mean_dice_bw = np.mean(np.array(dice_total_bw))

    # Per-category means plus an artery/vein average (the "_all" values).
    mean_dice = {}
    mean_dice_hard = {}
    mean_ppv = {}
    mean_sensiti = {}
    mean_acc = {}

    mean_dice_all = 0
    mean_dice_hard_all = 0
    mean_ppv_all = 0
    mean_sensiti_all = 0
    mean_acc_all = 0

    for curkey in dice_total.keys():
        curdice = np.mean(np.array(dice_total[curkey]))
        mean_dice[curkey] = curdice
        mean_dice_all += curdice

        curdicehard = np.mean(np.array(dice_hard_total[curkey]))
        mean_dice_hard[curkey] = curdicehard
        mean_dice_hard_all += curdicehard

        curppv = np.mean(np.array(ppv_total[curkey]))
        mean_ppv[curkey] = curppv
        mean_ppv_all += curppv

        cursensi = np.mean(np.array(sensitivity_total[curkey]))
        mean_sensiti[curkey] = cursensi
        mean_sensiti_all += cursensi

        curacc = np.mean(np.array(acc_total[curkey]))
        mean_acc[curkey] = curacc
        mean_acc_all += curacc

    # Average over the two categories (artery, vein).
    mean_dice_all /= 2.
    mean_dice_hard_all /= 2.
    mean_ppv_all /= 2.
    mean_sensiti_all /= 2.
    mean_acc_all /= 2.
    print('Binary Vessel Segmentation')
    print(
        'Train, epoch %d, loss %.4f, accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f, time %3.2f, lr % .5f '
        % (epoch, mean_loss, mean_acc_bw, mean_sensiti_bw, \
           mean_dice_bw, mean_dice_hard_bw, mean_ppv_bw, endtime - starttime, lr))

    print('Artery/Vein Classification')
    print(
        'Train, epoch %d, loss %.4f, accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f, time %3.2f, lr % .5f '
        % (epoch, mean_loss, mean_acc_all, mean_sensiti_all, \
           mean_dice_all, mean_dice_hard_all, mean_ppv_all, endtime - starttime, lr))

    print("Artery accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f" % (mean_acc[1], \
                                                                                           mean_sensiti[1],
                                                                                           mean_dice[1],
                                                                                           mean_dice_hard[1],
                                                                                           mean_ppv[1]))
    print("Vein accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f" % (mean_acc[2], \
                                                                                         mean_sensiti[2], mean_dice[2],
                                                                                         mean_dice_hard[2],
                                                                                         mean_ppv[2]))

    print()
    # Free cached GPU memory before returning to the caller.
    empty_cache()
    return mean_loss, mean_acc, mean_sensiti, mean_dice, mean_ppv


def val_casenet(epoch, model, data_loader, args, save_dir, test_flag=False):
    """
    Evaluate the model for one epoch: run every split cube through the
    network, stitch per-case volumes back together, save them as NIfTI
    files, write a per-case metrics CSV and return aggregate metrics.

    :param epoch: current epoch number (used to name the output directory)
    :param model: CNN model; called as model(x, coord) and expected to
        return (casePreds, vesselbw, attentions)
    :param data_loader: evaluation and testing data
    :param args: global arguments args
    :param save_dir: save directory ('valNNN'/'testNNN' subdir is created)
    :param test_flag: current mode of validation or testing
    :return: (mean_loss, mean_acc, mean_sensiti, mean_dice, mean_ppv) scalar
        artery/vein averages. NOTE: mean_loss is the *sum* over batches.
    """
    model.eval()
    # Stride and cube size used when stitching split cubes back together.
    sidelen = args.stridev
    if args.cubesizev is not None:
        margin = args.cubesizev
    else:
        margin = args.cubesize

    starttime = time.time()
    name_total = []
    lossHist = []
    lenHist = []

    # Per-case metric stores, keyed by case name; each value is
    # [artery_metric, vein_metric].
    dice_total = {}
    ppv_total = {}
    acc_total = {}
    dice_hard_total = {}
    sensitivity_total = {}

    # Binary whole-vessel metrics, one entry per case.
    dice_total_bw = []
    ppv_total_bw = []
    acc_total_bw = []
    dice_hard_total_bw = []
    sensitivity_total_bw = []

    if test_flag:
        valdir = os.path.join(save_dir, 'test%03d' % (int(epoch)))
    else:
        valdir = os.path.join(save_dir, 'val%03d' % (int(epoch)))
    if not os.path.exists(valdir):
        os.mkdir(valdir)

    # Per-case cube collections, to be stitched after the loop.
    p_total = {}
    x_total = {}
    y_total = {}
    y_total_bw = {}
    p_total_bw = {}
    feat3_total = {}
    feat4_total = {}
    feat5_total = {}
    feat6_total = {}
    #################
    with torch.no_grad():
        for i, (x, y, coord, ybw, ymask, airway, org, spac, NameID, SplitID, nzhw, ShapeOrg) in enumerate(
                tqdm(data_loader)):
            ######Wrap Tensor##########
            NameID = NameID[0]
            SplitID = SplitID[0]
            batchlen = x.size(0)
            x = x.cuda()
            y = y.cuda()
            ####################################################
            coord = coord.cuda()
            ybw = ybw.cuda()
            maskdata = ymask.numpy()
            maskdata = maskdata.astype('float')
            ymask = ymask.cuda()
            ymask.requires_grad = False

            casePreds, vesselbw, attentions = model(x, coord)

            # BUGFIX: was `bin_dice_loss`, a name that is never imported or
            # defined (NameError at runtime); the imported helper is
            # `binary_dice_loss`, the same loss used in train_casenet.
            loss = binary_dice_loss(vesselbw, ybw)

            # loss += binary_cross_entropy(vesselbw, ybw)
            loss += binary_focal_loss(vesselbw, ybw)

            if args.deepsupervision:
                # Deep supervision: main head plus three auxiliary heads.
                casePred, ds6, ds7, ds8 = casePreds[0], casePreds[1], casePreds[2], casePreds[3]

                dceloss = dice_loss(casePred, y, mask=ymask) + \
                          dice_loss(ds6, y, mask=ymask) + \
                          dice_loss(ds7, y, mask=ymask) + \
                          dice_loss(ds8, y, mask=ymask)

                # celoss = cross_entropy(casePred, y, mask=ymask) + \
                # cross_entropy(ds6, y, mask=ymask) + \
                # cross_entropy(ds7, y, mask=ymask) + \
                # cross_entropy(ds8, y, mask=ymask)

                fcloss = focal_loss(casePred, y, mask=ymask) + \
                         focal_loss(ds6, y, mask=ymask) + \
                         focal_loss(ds7, y, mask=ymask) + \
                         focal_loss(ds8, y, mask=ymask)

            else:
                casePred = casePreds

                dceloss = dice_loss(casePred, y, mask=ymask)

                # celoss = cross_entropy(casePred, y, mask=ymask)

                fcloss = focal_loss(casePred, y, mask=ymask)

            # Unlike training, the multi-class losses are always included
            # when reporting the validation loss.
            if dceloss is not None:
                loss += dceloss

            # if celoss is not None:
            # 	loss += celoss

            if fcloss is not None:
                loss += fcloss

            if args.sadencoder == 1:
                # attentions 0, 1, 2
                gamma_sad = [0.1, 0.1, 0.1]
                for iter_sad in range(2):
                    loss += (gamma_sad[iter_sad]) * sad_loss(attentions[iter_sad], attentions[iter_sad + 1],
                                                             encoder_flag=True)

            if args.saddecoder == 1:
                # attentions 3, 4, 5, 6
                gamma_sad = [0.2, 0.3, 0.4]
                for iter_sad in range(3, 6):
                    loss += (gamma_sad[iter_sad - 3]) * sad_loss(attentions[iter_sad], attentions[iter_sad + 1],
                                                                 encoder_flag=False)

            # for evaluation
            lossHist.append(loss.item())
            lenHist.append(batchlen)

            casePred = F.softmax(casePred, dim=1)

            #####################segmentation calculating metrics#######################
            outdata = casePred.cpu().data.numpy()
            outdatabw = np.argmax(outdata, axis=1)
            outdatabw = outdatabw.astype('float')
            vesseldata = vesselbw.cpu().data.numpy()
            vesseldatabw = (vesseldata > th_bin)
            ybwdata = ybw.cpu().data.numpy()
            segdata = y.cpu().data.numpy()
            xdata = x.cpu().data.numpy()
            origindata = org.numpy()
            spacingdata = spac.numpy()

            if args.featsave == 1:
                # Optionally keep decoder attention maps for visualization.
                feat3 = attentions[3].cpu().data.numpy()
                feat4 = attentions[4].cpu().data.numpy()
                feat5 = attentions[5].cpu().data.numpy()
                feat6 = attentions[6].cpu().data.numpy()
            #######################################################################
            #################REARRANGE THE DATA BY SPLIT ID########################
            for j in range(batchlen):
                # curxdata = (xdata[j, 0]*255)
                curydata = segdata[j, 0]
                segpred = outdatabw[j] * maskdata[j, 0]
                curybwdata = ybwdata[j, 0]
                curpbwdata = vesseldata[j, 0]

                curorigin = origindata[j].tolist()
                curspacing = spacingdata[j].tolist()
                cursplitID = int(SplitID[j])
                assert (cursplitID >= 0)
                curName = NameID[j]
                curnzhw = nzhw[j]
                curshape = ShapeOrg[j]

                if not (curName in x_total.keys()):
                    x_total[curName] = []
                if not (curName in y_total.keys()):
                    y_total[curName] = []
                if not (curName in p_total.keys()):
                    p_total[curName] = []
                if not (curName in y_total_bw.keys()):
                    y_total_bw[curName] = []
                if not (curName in p_total_bw.keys()):
                    p_total_bw[curName] = []
                if not (curName in feat3_total.keys()):
                    feat3_total[curName] = []
                if not (curName in feat4_total.keys()):
                    feat4_total[curName] = []
                if not (curName in feat5_total.keys()):
                    feat5_total[curName] = []
                if not (curName in feat6_total.keys()):
                    feat6_total[curName] = []

                # Each entry carries the cube plus everything combine_total
                # needs to place it back into the full volume.
                # curxinfo = [curxdata, cursplitID, curnzhw, curshape, curorigin, curspacing]
                curyinfo = [curydata, cursplitID, curnzhw, curshape, curorigin, curspacing]
                curpinfo = [segpred, cursplitID, curnzhw, curshape, curorigin, curspacing]
                curpbwinfo = [curpbwdata, cursplitID, curnzhw, curshape, curorigin, curspacing]
                curybwinfo = [curybwdata, cursplitID, curnzhw, curshape, curorigin, curspacing]
                # x_total[curName].append(curxinfo)
                y_total[curName].append(curyinfo)
                p_total[curName].append(curpinfo)
                p_total_bw[curName].append(curpbwinfo)
                y_total_bw[curName].append(curybwinfo)

                if args.featsave == 1:
                    curfeat3 = feat3[j, 0]
                    curfeat4 = feat4[j, 0]
                    curfeat5 = feat5[j, 0]
                    curfeat6 = feat6[j, 0]
                    # Upsample coarser attention maps to the cube resolution.
                    curfeat3 = zoom(curfeat3, 8, order=0, mode='nearest')  # 8
                    curfeat4 = zoom(curfeat4, 4, order=0, mode='nearest')  # 4
                    curfeat5 = zoom(curfeat5, 2, order=0, mode='nearest')  # 2
                    curf3info = [curfeat3, cursplitID, curnzhw, curshape, curorigin, curspacing]
                    curf4info = [curfeat4, cursplitID, curnzhw, curshape, curorigin, curspacing]
                    curf5info = [curfeat5, cursplitID, curnzhw, curshape, curorigin, curspacing]
                    curf6info = [curfeat6, cursplitID, curnzhw, curshape, curorigin, curspacing]
                    feat3_total[curName].append(curf3info)
                    feat4_total[curName].append(curf4info)
                    feat5_total[curName].append(curf5info)
                    feat6_total[curName].append(curf6info)

    ##################################################################################
    # combine all the cases together
    for curName in x_total.keys():
        # curx = x_total[curName]
        cury = y_total[curName]
        curp = p_total[curName]
        curp_bw = p_total_bw[curName]
        cury_bw = y_total_bw[curName]

        # x_combine, xorigin, xspacing = combine_total(curx, sidelen, margin)
        y_combine, curorigin, curspacing = combine_total(cury, sidelen, margin)
        p_combine_bw, porigin, pspacing = combine_total(curp, sidelen, margin)
        # Overlapping soft vessel predictions are averaged, then thresholded.
        p_vessel, _, _ = combine_total_avg(curp_bw, sidelen, margin)
        p_vessel_bw = (p_vessel > th_bin)
        y_vessel_bw, _, _ = combine_total(cury_bw, sidelen, margin)

        ######################combine all splited cubes###############################
        # curpath = os.path.join(valdir, '%s-case-org.nii.gz'%(curName))
        curypath = os.path.join(valdir, '%s-case-gt.nii.gz' % (curName))
        curpredpath = os.path.join(valdir, '%s-case-pred.nii.gz' % (curName))
        curpredbwpath = os.path.join(valdir, '%s-case-vesselbw.nii.gz' % (curName))
        curybwpath = os.path.join(valdir, '%s-case-gtbw.nii.gz' % (curName))
        curdiff = os.path.join(valdir, '%s-case-diff.nii.gz' % (curName))
        # Error map: encode each disagreement type with a distinct label
        # (1=false positive, 2/4=missed artery/vein, 3/5=class confusion).
        condition1 = (p_combine_bw != 0) & (y_combine == 0)
        condition2 = (p_combine_bw == 0) & (y_combine == 1)
        condition3 = (p_combine_bw == 2) & (y_combine == 1)
        condition4 = (p_combine_bw == 0) & (y_combine == 2)
        condition5 = (p_combine_bw == 1) & (y_combine == 2)
        gtdiff = condition1.astype('float') * 1.0 + condition2.astype('float') * 2.0 + \
                 condition3.astype('float') * 3.0 + condition4.astype('float') * 4.0 + condition5.astype('float') * 5.0

        ########################################################################
        # save_itk(x_combine.astype(dtype='uint8'), curorigin, curspacing, curpath)
        save_itk(y_combine.astype(dtype='uint8'), curorigin, curspacing, curypath)
        save_itk(p_combine_bw.astype(dtype='uint8'), curorigin, curspacing, curpredpath)
        save_itk(y_vessel_bw.astype(dtype='uint8'), curorigin, curspacing, curybwpath)
        save_itk(p_vessel_bw.astype(dtype='uint8'), curorigin, curspacing, curpredbwpath)
        save_itk(gtdiff.astype(dtype='uint8'), curorigin, curspacing, curdiff)

        if args.featsave == 1:
            curf3 = feat3_total[curName]
            curf4 = feat4_total[curName]
            curf5 = feat5_total[curName]
            curf6 = feat6_total[curName]
            f3, forg, fspac = combine_total_avg(curf3, sidelen, margin)
            f4, _, _ = combine_total_avg(curf4, sidelen, margin)
            f5, _, _ = combine_total_avg(curf5, sidelen, margin)
            f6, _, _ = combine_total_avg(curf6, sidelen, margin)

            # Rescale attention maps to [0, 255] for uint8 export.
            f3 = normalize_min_max(f3) * 255
            f4 = normalize_min_max(f4) * 255
            f5 = normalize_min_max(f5) * 255
            f6 = normalize_min_max(f6) * 255

            curf3path = os.path.join(valdir, '%s-case-f3.nii.gz' % (curName))
            curf4path = os.path.join(valdir, '%s-case-f4.nii.gz' % (curName))
            curf5path = os.path.join(valdir, '%s-case-f5.nii.gz' % (curName))
            curf6path = os.path.join(valdir, '%s-case-f6.nii.gz' % (curName))

            save_itk(f3.astype(dtype='uint8'), curorigin, curspacing, curf3path)
            save_itk(f4.astype(dtype='uint8'), curorigin, curspacing, curf4path)
            save_itk(f5.astype(dtype='uint8'), curorigin, curspacing, curf5path)
            save_itk(f6.astype(dtype='uint8'), curorigin, curspacing, curf6path)

        ########################################################################
        # Per-category masks on the stitched volume (1=artery, 2=vein).
        p1bw = (p_combine_bw == 1)
        p2bw = (p_combine_bw == 2)

        gt1 = (y_combine == 1)
        gt2 = (y_combine == 2)
        ##########################################################################
        dice_total[curName] = [dice_coef_np(p1bw, gt1), dice_coef_np(p2bw, gt2)]
        ppv_total[curName] = [ppv_np(p1bw, gt1), ppv_np(p2bw, gt2)]
        sensitivity_total[curName] = [sensitivity_np(p1bw, gt1), sensitivity_np(p2bw, gt2)]
        acc_total[curName] = [acc_np(p1bw, gt1), acc_np(p2bw, gt2)]
        dice_hard_total[curName] = [dice_coef_np(p1bw, gt1), dice_coef_np(p2bw, gt2)]

        ########################################################################
        name_total.append(curName)
        # "dice" uses the averaged soft map, "dice hard" the thresholded one.
        dice_total_bw.append(dice_coef_np(p_vessel, y_vessel_bw))
        ppv_total_bw.append(ppv_np(p_vessel_bw, y_vessel_bw))
        sensitivity_total_bw.append(sensitivity_np(p_vessel_bw, y_vessel_bw))
        acc_total_bw.append(acc_np(p_vessel_bw, y_vessel_bw))
        dice_hard_total_bw.append(dice_coef_np(p_vessel_bw, y_vessel_bw))

        # Release the stitched volumes before the next case to bound memory.
        del cury, curp, y_combine, p_combine_bw, p_vessel_bw, y_vessel_bw

    endtime = time.time()
    lenHist = np.array(lenHist)
    lossHist = np.array(lossHist)

    all_results = {'carve14': []}

    # Write one CSV row per case, then dataset-wide mean and std rows.
    with open(os.path.join(valdir, 'val_results.csv'), 'w') as csvout:
        writer = csv.writer(csvout)
        row = ['name', \
               'Artery acc', 'Vein acc', \
               'Artery sensi', 'Vein sensi', \
               'Artery dice', 'Vein dice', \
               'Artery diceh', 'Vein diceh', \
               'Artery ppv', 'Vein ppv', \
               'Vessel acc', 'Vessel sensi', 'Vessel dice', 'Vessel diceh', 'Vessel ppv']
        writer.writerow(row)

        for i in range(len(name_total)):
            name = name_total[i]
            keyw = 'carve14'

            row = [name] + acc_total[name] + sensitivity_total[name] + \
                  dice_total[name] + dice_hard_total[name] + ppv_total[name]
            row += [acc_total_bw[i], sensitivity_total_bw[i], dice_total_bw[i], dice_hard_total_bw[i], ppv_total_bw[i]]

            all_results[keyw].append(row[1:])
            writer.writerow(row)

        carve14_results = np.mean(np.array(all_results['carve14']), axis=0)
        carve14_results2 = np.std(np.array(all_results['carve14']), axis=0)

        carve14_mean = ['carve14 mean'] + carve14_results.tolist()
        carve14_std = ['carve14 std'] + carve14_results2.tolist()

        writer.writerow(carve14_mean)
        writer.writerow(carve14_std)
        # (the `with` block closes the file; no explicit close() needed)

    # Scalar artery/vein averages read back out of the CSV mean row.
    # Column layout: [label, acc A/V, sensi A/V, dice A/V, diceh A/V, ppv A/V].
    mean_dice = np.mean(np.array(carve14_mean[5:7]))
    mean_dice_hard = np.mean(np.array(carve14_mean[7:9]))
    mean_ppv = np.mean(np.array(carve14_mean[9:11]))
    mean_sensiti = np.mean(np.array(carve14_mean[3:5]))
    mean_acc = np.mean(np.array(carve14_mean[1:3]))
    # NOTE(review): named "mean" but this is the summed loss over batches.
    mean_loss = np.sum(lossHist)

    mean_dice_hard_bw = np.mean(np.array(dice_hard_total_bw))
    mean_sensiti_bw = np.mean(np.array(sensitivity_total_bw))
    mean_ppv_bw = np.mean(np.array(ppv_total_bw))
    mean_acc_bw = np.mean(np.array(acc_total_bw))
    mean_dice_bw = np.mean(np.array(dice_total_bw))

    print('Binary Vessel Segmentation')
    print('Val, epoch %d, loss %.4f, accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f, time %3.2f'
          % (epoch, mean_loss, mean_acc_bw, mean_sensiti_bw, \
             mean_dice_bw, mean_dice_hard_bw, mean_ppv_bw, endtime - starttime))

    print('Artery/Vein Classification')
    print('Val, epoch %d, loss %.4f, accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f, time %3.2f'
          % (epoch, mean_loss, mean_acc, mean_sensiti, mean_dice, mean_dice_hard, mean_ppv, endtime - starttime))

    print("Artery accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f" % (carve14_mean[1], \
                                                                                           carve14_mean[3],
                                                                                           carve14_mean[5],
                                                                                           carve14_mean[7],
                                                                                           carve14_mean[9]))
    print("Vein accuracy %.4f, sensitivity %.4f, dice %.4f, dice hard %.4f, ppv %.4f" % (carve14_mean[2], \
                                                                                         carve14_mean[4],
                                                                                         carve14_mean[6],
                                                                                         carve14_mean[8],
                                                                                         carve14_mean[10]))

    print()
    # Free cached GPU memory before returning to the caller.
    empty_cache()
    return mean_loss, mean_acc, mean_sensiti, mean_dice, mean_ppv
