from multiprocessing import Pool
from LungSegmentation import lung_segmentation_scan
import h5py
from collections import deque
import time
import os


def main(infile, outfile, processes, maxlen):
    """Segment lungs for every scan in *infile* and write the masks to *outfile*.

    Scans are submitted to a worker pool and results are written back in
    submission order, keeping at most *maxlen* tasks in flight at once.

    Parameters
    ----------
    infile : str
        Path to an HDF5 file holding one volume dataset per scan id; each
        dataset is expected to carry a 'spacing' attribute.
    outfile : str
        Path of the HDF5 file to create, one mask dataset per scan id.
    processes : int
        Number of worker processes in the pool.
    maxlen : int
        Maximum number of outstanding tasks before blocking on the oldest.
    """
    queue = deque()

    def _drain_one(g):
        # Pop the oldest task, block until its result is ready, persist it.
        scanid, result, start_time = queue.popleft()
        g.create_dataset(scanid, data=result.get())
        g.flush()  # flush so partial progress survives a crash
        print('processing %s using %.2f seconds' % (scanid, time.time() - start_time))

    # Context-manage the pool too: the original never closed/joined it, so
    # worker processes leaked on exit and on any exception.
    with Pool(processes=processes) as pool, \
            h5py.File(infile, 'r') as f, \
            h5py.File(outfile, 'w', libver='latest') as g:
        for scanid in sorted(f.keys()):
            vol = f[scanid][...]
            spacing = f[scanid].attrs['spacing']
            queue.append((scanid, pool.apply_async(lung_segmentation_scan, (vol, spacing)), time.time()))
            # Drain every already-finished task (not just one per submission),
            # and block on the oldest when the in-flight window is full.
            while queue and (queue[0][1].ready() or len(queue) >= maxlen):
                _drain_one(g)
        # Collect whatever is still in flight.
        while queue:
            _drain_one(g)


if __name__ == '__main__':
    # Size the pool from the machine: leave one core free for this
    # (submitting/writing) process, and allow twice as many queued tasks.
    n_cpus = os.cpu_count()
    main(
        infile='/data_4t/Kaggle/backup/lidc/vol.hdf5',
        outfile='/data_4t/Kaggle/backup/lidc/lungmask.hdf5',
        processes=n_cpus - 1,
        maxlen=2 * n_cpus,
    )
