#!/usr/bin/env python
import sys
import math
import numpy as np
import os, time, random
from multiprocessing import Pool
from nipy import load_image
import nibabel as nib
import scipy.io as sio

def calculate_fc_mat(name, file_dict, target_dir, symbol):
    """Compute one functional-connectivity (FC) matrix per session and save it.

    For each lh/rh surface time-series pair in *file_dict*, stacks both
    hemispheres into a (vertices x frames) array, computes the vertex-wise
    Pearson correlation matrix, and writes it to
    ``<target_dir>/<sid>_<sess_id>_fc.mat`` under the key ``'fc'``.

    Parameters
    ----------
    name : task label used only in log messages.
    file_dict : dict with 'lh' and 'rh' lists of surface file paths; the
        lists are sorted in place so lh/rh pairs line up by index.
    target_dir : directory the .mat files are written into (must exist).
    symbol : unused here; kept for interface compatibility with the caller.
    """
    print('Run task %s (%s)...' % (name, os.getpid()))
    start = time.time()

    # Frame count read once from the first lh file via FreeSurfer's mri_info.
    # `with` closes the pipe (the original leaked it in every worker).
    # NOTE(review): assumes all sessions share the same frame count — confirm,
    # otherwise the reshape below fails for mismatched sessions.
    with os.popen("mri_info " + file_dict['lh'][0] + " --nframes") as pipe:
        slice_number = int(pipe.readlines()[0])

    # Sort both hemisphere lists so lh/rh files pair up by session index.
    file_dict['lh'].sort()
    file_dict['rh'].sort()

    for sess_index in range(len(file_dict['lh'])):
        # Load each hemisphere and flatten to (vertices, frames), Fortran order
        # to match the MGH on-disk layout.
        lh_img = nib.load(file_dict['lh'][sess_index])
        lh_data = np.reshape(lh_img.get_fdata(), [-1, slice_number], 'F')

        rh_img = nib.load(file_dict['rh'][sess_index])
        rh_data = np.reshape(rh_img.get_fdata(), [-1, slice_number], 'F')

        # Stack hemispheres: rows are vertices, columns are time points.
        whole_data = np.vstack((lh_data, rh_data))

        # Vertex-by-vertex correlation; NaNs (e.g. from zero-variance
        # vertices) are replaced with 0.
        corr_arr = np.nan_to_num(np.corrcoef(whole_data))

        print('Task %s runs %0.2f seconds.' % (name, (time.time() - start)))

        print("########## Saving File ##########")
        data_dict = {'fc': corr_arr}

        # Subject id is taken from a fixed path depth; session id from the
        # third '_'-separated token of the filename. Both depend on the
        # project's directory layout — verify if paths change.
        sid = file_dict['lh'][sess_index].split('/')[7]
        sess_file = os.path.basename(file_dict['lh'][sess_index])
        sess_id = sess_file.split('_')[2]

        file_name = os.path.join(target_dir, sid + '_' + sess_id + '_fc.mat')
        sio.savemat(file_name, data_dict)

        print("[SAVE >> %s ]: Save file in %s" % (sid, file_name))
        print("########## Done ##########")

def get_task_infos(project_rest_dir, task_file, target_suffix = 'fs4'):
    """Collect per-subject lh/rh surface file pairs.

    Reads subject names (one per line) from *task_file*, scans each subject's
    ``surf`` directory for filenames containing *target_suffix*, and groups
    the full paths into ``{'lh': [...], 'rh': [...]}`` dicts.

    Returns
    -------
    (file_list, count) : list of per-subject dicts, and its length.
    """
    file_list = []

    # `with` guarantees the handle is closed (the original leaked it).
    with open(task_file, "r") as fp:
        for line in fp:
            subj_name = line.replace("\n", "")
            sub_dir = os.path.join(project_rest_dir, subj_name)
            sub_surf_dir = os.path.join(sub_dir, 'surf')

            tmp_dict = {'lh': [], 'rh': []}
            for surf_item in os.listdir(sub_surf_dir):
                if target_suffix not in surf_item:
                    continue
                full_target_file = os.path.join(sub_surf_dir, surf_item)
                # Substring tests: a name could in principle match both;
                # normally exactly one hemisphere tag applies.
                if 'lh' in surf_item:
                    tmp_dict['lh'].append(full_target_file)
                if 'rh' in surf_item:
                    tmp_dict['rh'].append(full_target_file)

            # BUG FIX: the original tested key *presence* with __contains__,
            # but both keys always exist, so incomplete subjects were silently
            # accepted and the warning was unreachable. Require both lists to
            # be non-empty instead.
            if tmp_dict['lh'] and tmp_dict['rh']:
                file_list.append(tmp_dict)
            else:
                print('[# WARNING: ] File not complete in `%s`]' % (sub_dir))

    return file_list, len(file_list)

if __name__=='__main__':
    # Driver: builds the task list from the subject file, then fans the
    # per-subject FC computation out over a process pool.
    print('Parent process %s.' % os.getpid())
    process_num = 10

    # Site / cohort selectors used to locate the subject list and name the
    # output directory.
    symbol = "OHSU"
    sub_type = "ASD"

    project_dir = '/home/liang/Projects/ASD_QC'

    target_dir = os.path.join(project_dir, 'Results/FC_mats/' + symbol)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    task_file = os.path.join(project_dir, 'Lists/censored_sub_list_' + sub_type + '_' + symbol + '.txt')
    processed_rest_dir = os.path.join(project_dir, 'DataProcessed/rest')
    task_list, task_size = get_task_infos(processed_rest_dir, task_file)

    p = Pool(process_num)
    # BUG FIX: keep the AsyncResult handles. The original discarded them,
    # so any exception raised inside a worker was silently swallowed and
    # the run appeared to succeed.
    results = [p.apply_async(calculate_fc_mat, args=(i, task_list[i], target_dir, symbol))
               for i in range(task_size)]
    print('Waiting for all subprocesses done...')
    p.close()
    p.join()
    for res in results:
        res.get()  # re-raises the worker's exception here, if any
    print('All subprocesses done.')
