"""
The main program to build the dataset for train and test
"""

import argparse
import logging
import os
import pickle
from glob import glob

import h5py
import numpy as np

from dicom_parser import *


def build_lidc_dataset(lidc_dir, output_dir, version='v9'):
    """
    Build the HDF5 volume dataset for LIDC. Distinct versions differ in the
    order in which the DICOM slices are stacked.
    :param lidc_dir: lidc original data directory, which could be downloaded free
    :param output_dir: target output directory to save the result (vol.hdf5)
    :param version: 'v9' (slices ordered by 'Location') or 'v15' (ordered by
                    'InstanceNumber'); None falls back to 'v9'
    :return: None
    :raises ValueError: if version is not one of 'v9' / 'v15' / None
    """
    if version is None:
        version = 'v9'  # tolerate callers (e.g. the CLI) that pass None explicitly
    if version == 'v9':
        order = 'Location'
    elif version == 'v15':
        order = 'InstanceNumber'
    else:
        raise ValueError('Unknown version')

    os.makedirs(output_dir, exist_ok=True)

    # BUG FIX: uid2lidc.pkl is a plain pickle file. np.load() rejects it on
    # modern numpy (allow_pickle defaults to False) and, even when allowed,
    # returns a 0-d ndarray rather than the dict whose .get() is used below.
    with open('./uid2lidc.pkl', 'rb') as f:
        uid2lidc = pickle.load(f)

    # 'x' mode: fail loudly instead of clobbering an existing vol.hdf5
    vol_dataset = h5py.File(os.path.join(output_dir, 'vol.hdf5'), 'x')
    try:
        for patient_dir in sorted(os.listdir(lidc_dir)):
            for study_dir in glob('%s/%s/*' % (lidc_dir, patient_dir)):
                for series_name in os.listdir(study_dir):
                    series_dir = os.path.join(study_dir, series_name)
                    if not check_dcm_dir(series_dir):
                        continue

                    scan_name = uid2lidc.get(series_name)  # keep only scans of interest
                    if scan_name is None:
                        continue
                    print(scan_name)

                    vol, sop, loc, spacing, slice_thickness, series_uid = parse_dcm_dir(series_dir, order=order)

                    # warn (but keep going) when slice locations look inconsistent
                    error_info = check_location(loc, spacing[-1])
                    if error_info is not None:
                        logging.warning(error_info)

                    vol_data = vol_dataset.create_dataset(scan_name, data=vol, dtype=np.int16,
                                                          chunks=(512, 512, 1), compression='gzip')
                    vol_data.attrs['spacing'] = spacing
                    if slice_thickness is not None:
                        vol_data.attrs['slice_thickness'] = slice_thickness
    finally:
        # previously the HDF5 handle leaked if any series failed to parse
        vol_dataset.close()


def build_spie_dataset(spie_dir, output_dir):
    """
    Build the HDF5 volume dataset for SPIE and record the patient -> series
    UID mapping (spie2uid.pkl) alongside it.
    :param spie_dir: SPIE original data directory (patient/study/series layout)
    :param output_dir: target output directory for vol.hdf5 and spie2uid.pkl
    :return: None
    """
    os.makedirs(output_dir, exist_ok=True)

    # 'x' mode: fail loudly instead of clobbering an existing vol.hdf5
    vol_dataset = h5py.File(os.path.join(output_dir, 'vol.hdf5'), 'x')
    spie2uid = dict()

    try:
        for patient_dir in os.listdir(spie_dir):
            for study_dir in glob('%s/%s/*' % (spie_dir, patient_dir)):
                for series_name in os.listdir(study_dir):
                    series_dir = os.path.join(study_dir, series_name)
                    if not check_dcm_dir(series_dir):
                        continue

                    print(patient_dir)

                    # default slice order (no explicit order= as for LIDC/Kaggle)
                    vol, sop, loc, spacing, slice_thickness, series_uid = parse_dcm_dir(series_dir)
                    vol_data = vol_dataset.create_dataset(patient_dir, data=vol, dtype=np.int16,
                                                          chunks=(512, 512, 1), compression='gzip')
                    vol_data.attrs['spacing'] = spacing
                    if slice_thickness is not None:
                        vol_data.attrs['slice_thickness'] = slice_thickness

                    spie2uid[patient_dir] = series_uid

        with open(os.path.join(output_dir, 'spie2uid.pkl'), 'wb') as f:
            pickle.dump(spie2uid, f, -1)  # -1: highest pickle protocol
    finally:
        # previously the HDF5 handle leaked if any series failed to parse
        vol_dataset.close()


def build_kaggle_dataset(kaggle_dir, output_dir):
    """
    Build the HDF5 volume dataset for the Kaggle data (flat series layout).

    Writes vol.hdf5 plus bookkeeping files: test_list.npy (successfully
    parsed series), error_list.npy (series with suspicious slice locations),
    fatal_list.npy (series that raised during parsing), and kaggle2uid.pkl
    (series name -> DICOM series UID mapping).
    :param kaggle_dir: directory containing one DICOM series per subdirectory
    :param output_dir: target output directory for all result files
    :return: None
    """
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)

    vol_dataset = h5py.File(os.path.join(output_dir, 'vol.hdf5'), 'x')
    processed = []   # series stored successfully
    suspicious = []  # series whose slice locations triggered a warning
    failed = []      # series that raised while parsing
    kaggle2uid = {}

    for series_name in os.listdir(kaggle_dir):
        print(series_name)
        series_dir = os.path.join(kaggle_dir, series_name)
        if not check_dcm_dir(series_dir):
            continue

        try:
            vol, sop, loc, spacing, slice_thickness, series_uid = parse_dcm_dir(series_dir, order='InstanceNumber')

            # check location consistency; spacing may have been adjusted
            problem = check_location(loc, spacing[-1])
            if problem is not None:
                logging.warning(problem)
                suspicious.append(series_name)

            dset = vol_dataset.create_dataset(series_name, data=vol, dtype=np.int16,
                                              chunks=(512, 512, 1), compression='gzip')
            dset.attrs['spacing'] = spacing
            if slice_thickness is not None:
                dset.attrs['slice_thickness'] = slice_thickness

            kaggle2uid[series_name] = series_uid
            processed.append(series_name)

        except Exception as e:
            # keep going: one bad series must not abort the whole build
            logging.error(e)
            failed.append(series_name)

    print('error_list:%d' % len(suspicious))
    print('fatal_list:%d' % len(failed))

    np.save(os.path.join(output_dir, 'test_list.npy'), processed)
    np.save(os.path.join(output_dir, 'error_list.npy'), suspicious)
    np.save(os.path.join(output_dir, 'fatal_list.npy'), failed)

    with open(os.path.join(output_dir, 'kaggle2uid.pkl'), 'wb') as f:
        pickle.dump(kaggle2uid, f, -1)

    vol_dataset.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Build dataset from dicom files")
    parser.add_argument("-i", "--input",
                        dest="input_dir", metavar="input dicom directory",
                        required=True,
                        help="lidc, spie, kaggle stage1 and stage2")

    parser.add_argument("-o", "--output",
                        dest="output_dir", metavar="output directory",
                        required=True,
                        help="output directory to save vol.hdf5 and other auxiliary info")

    parser.add_argument("-m", "--mode",
                        dest="mode", metavar="mode of input",
                        default='general', required=False,
                        help="structure of dicom directory: 'lidc' or 'kaggle' or 'spie'.")

    # BUG FIX: default was None, which overrode build_lidc_dataset's own 'v9'
    # default and made "-m lidc" without "-v" raise ValueError('Unknown version').
    parser.add_argument("-v", "--version",
                        dest="version", metavar="version of lidc",
                        default='v9', required=False,
                        help="version of lidc, v9 or v15")

    args = parser.parse_args()

    if args.mode == 'lidc':
        build_lidc_dataset(args.input_dir, args.output_dir, args.version)
    elif args.mode == 'kaggle':
        build_kaggle_dataset(args.input_dir, args.output_dir)
    elif args.mode == 'spie':
        build_spie_dataset(args.input_dir, args.output_dir)
    else:
        # NOTE: the 'general' default deliberately falls through here, forcing
        # the user to pick an explicit mode.
        raise ValueError('Unknown mode.')

