import os
import sys
from importlib import import_module

import data
from inner_utils import debug_dataloader
from trainval_classifier import test_casenet

sys.path.append('../')
from split_combine_mj import SplitComb
import torch
from torch.nn import DataParallel
from torch.utils.data import DataLoader
from prepare import preprocess_CT
from glob import glob
from tqdm import tqdm
from option import parser


def main():
    """Run segmentation inference over a directory of CT volumes.

    Workflow:
      1. Parse command-line options and seed torch for determinism.
      2. Load the ``baseline_fr`` model and its checkpoint, move it to GPU
         (optionally wrapped in ``DataParallel``).
      3. Resolve the output directory (defaults to ``./results``) and the
         input directory; optionally preprocess raw CTs (``--preprocess 1``).
      4. Segment every ``*_clean_hu.nii.gz`` volume that does not already
         have a ``*_airway.nii.gz`` result, via ``test_casenet``.

    Raises:
        FileNotFoundError: if the input path does not exist.
    """
    # NOTE(review): removed leftover debug code that unconditionally ran
    # preprocess_CT on a hard-coded absolute Windows path before argument
    # parsing; preprocessing is driven by --preprocess below.
    global args
    args = parser.parse_args()
    torch.manual_seed(0)  # reproducible inference across runs

    print('----------------------Load Model----------------------')
    model = import_module('baseline_fr')
    config, net = model.get_model(args)
    checkpoint = torch.load('./baseline_fr_ad.ckpt')
    net.load_state_dict(checkpoint['state_dict'])
    net = net.cuda()
    # cudnn.benchmark = True
    if args.multigpu:
        net = DataParallel(net)

    print('----------------------Check OutputDir----------------------')
    save_dir = args.outputpath
    if save_dir is None:
        save_dir = './results'
    # makedirs(exist_ok=True) is race-free and handles nested paths,
    # unlike the previous exists()-then-mkdir pair.
    os.makedirs(save_dir, exist_ok=True)

    print('----------------------Check InputDir----------------------')
    data_dir = args.inputpath
    # Explicit raise instead of assert: asserts are stripped under -O.
    if not os.path.exists(data_dir):
        raise FileNotFoundError(f'input path does not exist: {data_dir}')
    if args.preprocess == 1:
        data_dir = preprocess_CT(inputpath=data_dir, savepath=save_dir)
    split_comber = SplitComb(args.stridev, args.cubesize)

    filelist = glob(os.path.join(data_dir, '*_clean_hu.nii.gz'))  # default nifty format
    # Skip already-segmented images.  os.path.basename handles both '/'
    # and '\\' separators; splitting on '/' silently broke on Windows.
    processedlst = [os.path.basename(y).split('_airway')[0]
                    for y in glob(os.path.join(save_dir, '*_airway.nii.gz'))]
    filelist = [x for x in filelist
                if os.path.basename(x).split('_clean_hu.nii.gz')[0] not in processedlst]

    for case_path in tqdm(filelist):
        print("Start: ", case_path)
        dataset_test = data.AirwayData(split_comber=split_comber, data_dir=case_path)
        test_loader = DataLoader(
            dataset_test,
            batch_size=args.batch_size,
            shuffle=False,
            num_workers=args.workers,
            pin_memory=True)

        if args.debugdataloader:
            debug_dataloader(test_loader)

        print('----------------------Segmentation----------------------')
        test_casenet(net, test_loader, args, case_path, save_dir)

    return


# Script entry point: run inference only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()
