#!/usr/bin/env python
import sys
import math
import numpy as np
import os, time, random
from multiprocessing import Pool
from nipy import load_image
import nibabel as nib
import scipy.io as sio
import scipy.stats as sst

def calculate_fc_mat(name, file_dict, symbol):
    """Compute the functional-connectivity (FC) matrix for one subject.

    Loads the left/right hemisphere surface time-series (MGH files given in
    ``file_dict``), flattens each into a (vertices, frames) matrix, stacks the
    hemispheres vertex-wise, and returns the vertex-by-vertex Pearson
    correlation matrix.

    Args:
        name: Task label, used only in progress log messages.
        file_dict: Dict with keys ``'lh'`` and ``'rh'`` mapping to the
            hemisphere time-series file paths.
        symbol: Site/dataset tag. Unused here (the caller handles saving);
            kept for interface compatibility.

    Returns:
        numpy.ndarray: Square correlation matrix over all vertices
        (lh rows first, then rh), with NaNs replaced by 0.
    """
    print('Run task %s (%s)...' % (name, os.getpid()))
    start = time.time()

    # Number of time frames, needed to unfold the 4-D MGH volume into a
    # (vertices, frames) matrix. Queries FreeSurfer's mri_info, matching the
    # original pipeline (requires FreeSurfer on PATH). Bug fix: the popen
    # pipe was previously never closed.
    with os.popen("mri_info " + file_dict['lh'] + " --nframes") as pipe:
        slice_number = int(pipe.readlines()[0])

    lh_img = nib.load(file_dict['lh'])
    # Fortran ('F') order preserves the vertex ordering when flattening.
    lh_data = np.reshape(lh_img.get_fdata(), [-1, slice_number], 'F')

    rh_img = nib.load(file_dict['rh'])
    rh_data = np.reshape(rh_img.get_fdata(), [-1, slice_number], 'F')

    # Rows = vertices (lh stacked on top of rh), columns = time frames.
    whole_data = np.vstack((lh_data, rh_data))

    # Pearson correlation between every pair of vertex time-series.
    corr_arr = np.corrcoef(whole_data)
    # Constant time-series produce NaN correlations; zero them out.
    corr_arr = np.nan_to_num(corr_arr)

    # Bug fix: removed an embedded one-off debugging block that loaded a
    # hard-coded subject's .mat file, wrote two fixed .mgh outputs and then
    # called sys.exit(0) -- which terminated the whole process and made this
    # function never return. Also removed `corr_arr = corr_arr[0:row, 0:row]`,
    # which referenced the undefined name `row` (its definition was commented
    # out) and would have raised NameError.

    end = time.time()
    print('Task %s runs %0.2f seconds.' % (name, (end - start)))

    return corr_arr

def get_task_infos(project_rest_dir, task_file, target_suffix = 'fs4'):
    """Collect lh/rh surface file paths for every subject listed in a task file.

    For each subject name (one per line) in ``task_file``, scans
    ``<project_rest_dir>/<subject>/surf`` for files whose name contains
    ``target_suffix``, and records the 'lh'/'rh' matches.

    Args:
        project_rest_dir: Root directory holding per-subject folders.
        task_file: Text file with one subject directory name per line.
        target_suffix: Substring that selects the surface files (default 'fs4').

    Returns:
        tuple: (list of {'lh': path, 'rh': path} dicts — only subjects where
        BOTH hemispheres were found, incomplete ones are warned and skipped;
        number of collected subjects).
    """
    file_list = []

    # Bug fix: the file handle was previously opened without ever being
    # closed; `with` guarantees closure. Iterating the handle directly also
    # avoids materializing the whole file via readlines().
    with open(task_file, "r") as fp:
        for line in fp:
            subj_name = line.replace("\n", "")
            sub_dir = os.path.join(project_rest_dir, subj_name)
            sub_surf_dir = os.path.join(sub_dir, 'surf')

            tmp_dict = {}
            for surf_item in os.listdir(sub_surf_dir):
                if target_suffix in surf_item:
                    full_target_file = os.path.join(sub_surf_dir, surf_item)
                    if 'lh' in surf_item:
                        tmp_dict['lh'] = full_target_file
                    if 'rh' in surf_item:
                        tmp_dict['rh'] = full_target_file

            # Idiom fix: `key in dict` instead of dict.__contains__(key).
            if 'lh' in tmp_dict and 'rh' in tmp_dict:
                file_list.append(tmp_dict)
            else:
                print('[# WARNING: ] File not complete in `%s`]' % (sub_dir))

    return file_list, len(file_list)

if __name__=='__main__':
    print('Parent process %s.' % os.getpid())
    # NOTE(review): process_num is unused — the loop below runs serially even
    # though multiprocessing.Pool is imported. Kept for reference.
    process_num = 10

    # Site/dataset tag: selects the subject list file and the output folder.
    symbol = "KKI"

    project_dir = '/mri_projects/ASD_Analysis'

    target_dir = os.path.join(project_dir, 'Scripts/fc_mats/' + symbol)
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    task_file = os.path.join(project_dir, 'Lists/variability_list_ASD_' + symbol + '.txt')
    processed_rest_dir = os.path.join(project_dir, 'DataProcessed/rest')

    task_list, task_size = get_task_infos(processed_rest_dir, task_file)

    # Bug fix: data_list was appended to without ever being initialized
    # (NameError on the first iteration), and data_file was never defined
    # before np.save. data_file now lands in the per-site output directory
    # created above — confirm this destination matches the pipeline's intent.
    data_list = []
    data_file = os.path.join(target_dir, symbol + '_fc_data.npy')

    for p_index in range(task_size):
        fc_mat = calculate_fc_mat(p_index, task_list[p_index], symbol)
        data_list.append(fc_mat)

    # Stack all per-subject FC matrices into one array and persist it.
    np.save(data_file, np.array(data_list))