
import numpy as np
DEVICE='cuda:0'

import os
from obspy.geodetics.base import gps2dist_azimuth
from scipy import interpolate,signal
from correlate import do_correlate2
import h5py
deg2rad = 1/180*np.pi
from functools import partial
import time

def correlate(data_raw,BASE_STAT,stats, ROOT='./data/', **paras):
    """Cross-correlate every trace against the base-station wavelet of its event.

    Parameters
    ----------
    data_raw : np.ndarray
        (ne, ns, nt) raw traces: events x stations x time samples.
    BASE_STAT : str
        Name of the base station; its windowed trace serves as the
        source wavelet for each event. Must be present in ``stats``.
    stats : list of str
        Station names, in the same order as axis 1 of ``data_raw``.
    ROOT : str
        Unused here; kept for interface compatibility with callers.
    **paras
        Optional overrides: CHN (channel, default 'Z', currently unused
        in the computation), wavelet_L ((s, e) wavelet window in seconds,
        default (0, 5)), coor_L ((sc, ec) correlation lag window in
        seconds, default (-80, 80)), dt (sample interval, default 0.01).

    Returns
    -------
    np.ndarray
        (ne, ns, ec_p - sc_p) float32 array of correlations, C-ordered.
    """
    CHN='Z'  if paras.get('CHN') is None else paras.get('CHN')
    (s, e) = (0, 5) if paras.get('wavelet_L') is None else paras.get('wavelet_L')
    (sc, ec) = (-80,80) if paras.get('coor_L') is None else paras.get('coor_L')
    dt = 0.01 if paras.get('dt') is None else paras.get('dt')

    ne,ns,nt = data_raw.shape
    # Sample indices of the wavelet window, measured from the trace centre.
    s_p, e_p = nt//2+int(s/dt), nt//2+int(e/dt)
    # Sample indices bounding the correlation output window.
    sc_p, ec_p = nt//2+int(sc/dt)+int(s/dt), nt//2+int(ec/dt)+int(s/dt)

    print('do preprocesses')
    # Fortran-ordered buffers: do_correlate2 presumably expects
    # column-major input — TODO confirm against its implementation.
    wavelet = np.zeros([e_p-s_p, ne], order='F')
    d = np.zeros([nt,ns, ne], order='F')
    idx_BASE = stats.index(BASE_STAT)  # loop-invariant: hoisted out of the event loop
    for ie in range(ne):
        # The base-station window is the source wavelet for this event.
        wavelet[:,ie] = data_raw[ie,idx_BASE,s_p:e_p]
        for i_s in range(ns):
            d[:,i_s,ie] = data_raw[ie,i_s,:]  # copy trace into F-ordered layout
        print('\r {}|{}'.format(ie,ne), end='', flush=True)
    print('\ndo correlate')
    data_correlateF = do_correlate2(d, wavelet, ne, ns, nt, e_p-s_p, sc_p, ec_p)
    del d  # free the large F-ordered copy as soon as it is no longer needed
    print('do norm')
    # Transpose the F-ordered result back into a conventional C-ordered array.
    data_correlate = np.zeros([ne,ns,ec_p-sc_p]).astype('float32')
    for ie in range(ne):
        for i_s in range(ns):
            data_correlate[ie,i_s,:] = data_correlateF[:,i_s,ie]
        print('\r {}|{}'.format(ie,ne), end='', flush=True)

    return data_correlate

def correlate_by_fft(data_raw,wavelet, sc,ec,dt=0.01, SIGN=True):
    """Correlate each trace with its per-event wavelet via scipy.signal.

    ``data_raw`` is (ne, ns, nt); ``wavelet`` is (ne, nw), one wavelet
    row per event. Returns a float32 (ne, ns, nt_coor) array holding the
    'full' cross-correlation restricted to the lag window [sc, ec)
    seconds. When SIGN is true, both trace and wavelet are one-bit
    (sign) normalized before correlating.
    """
    n_event, n_stat, n_samp = data_raw.shape
    # Zero lag of a mode='full' correlation sits at index n_samp-1.
    lo = n_samp - 1 + int(sc / dt)
    hi = n_samp - 1 + int(ec / dt)
    out = np.zeros([n_event, n_stat, hi - lo], dtype='float32')
    for ev in range(n_event):
        src = np.sign(wavelet[ev, :]) if SIGN else wavelet[ev, :]
        for st in range(n_stat):
            rec = data_raw[ev, st, :]
            if SIGN:
                rec = np.sign(rec)
            full = signal.correlate(rec, src, mode='full', method='auto')
            out[ev, st, :] = full[lo:hi]
    return out

def correlate_by_fft_2d(data_raw,wavelet, sc,ec,dt=0.01, SIGN=True):
    """Correlate every trace of a 2-D gather against one shared wavelet.

    ``data_raw`` is (ns, nt); ``wavelet`` is 1-D. Returns a float32
    (ns, nt_coor) slice of the 'full' correlation over the lag window
    [sc, ec) seconds.

    NOTE(review): unlike correlate_by_fft, SIGN here normalizes the
    traces only — the wavelet is used as passed in. Confirm whether
    callers pre-sign the wavelet.
    """
    n_stat, n_samp = data_raw.shape
    # Zero lag of a mode='full' correlation sits at index n_samp-1.
    lo = n_samp - 1 + int(sc / dt)
    hi = n_samp - 1 + int(ec / dt)
    out = np.zeros([n_stat, hi - lo], dtype='float32')
    for st in range(n_stat):
        rec = np.sign(data_raw[st, :]) if SIGN else data_raw[st, :]
        out[st, :] = signal.correlate(rec, wavelet, mode='full', method='auto')[lo:hi]
    return out

def correlate_by_fft_parallel(data_raw,wavelets, sc,ec,dt=0.01, SIGN=True, N_CORE=10):
    """Parallel wrapper around correlate_by_fft, splitting along the event axis.

    Parameters
    ----------
    data_raw : np.ndarray
        (ne, ns, nt) raw traces.
    wavelets : np.ndarray
        (ne, nw) per-event wavelets, row-aligned with ``data_raw``.
    sc, ec : float
        Correlation lag window bounds in seconds.
    dt : float
        Sample interval in seconds.
    SIGN : bool
        Forwarded to correlate_by_fft (one-bit normalization).
    N_CORE : int
        Number of worker processes.

    Returns
    -------
    np.ndarray
        (ne, ns, nt_coor) float32 correlation volume.
    """
    from multiprocessing.pool import Pool

    ne,ns,nt = data_raw.shape
    sc_p, ec_p = nt-1+int(sc/dt), nt-1+int(ec/dt)
    nt_coor = ec_p-sc_p

    ts = time.time()
    print('multiprocesses start')
    # Over-partition (20 jobs per core) so the pool can balance uneven load.
    N_job = N_CORE*20
    NE_per_core = ne//N_job+1
    input_args = []
    for j in range(N_job):
        s_j = j*NE_per_core
        e_j = min(ne,(j+1)*NE_per_core)
        # BUG FIX: SIGN was previously hard-coded to False here, so the
        # caller's SIGN flag was silently ignored. Forward it, matching
        # Pcorrelate_by_ns.
        input_args.append([data_raw[s_j:e_j,:,:], wavelets[s_j:e_j,:], sc, ec, dt, SIGN])
    with Pool(N_CORE) as pool:
        results = pool.starmap(correlate_by_fft, input_args)
    # Stitch the per-job slabs back together in event order.
    data_correlate = np.zeros([ne,ns,nt_coor], dtype='float32')
    for j, data_j in enumerate(results):
        s_j = j*NE_per_core
        e_j = min(ne,(j+1)*NE_per_core)
        data_correlate[s_j:e_j,:,:] = data_j
    print('multiprocesses end')
    te = time.time()
    print('take time :{:2.2f}s'.format(te-ts))

    return data_correlate

def Pcorrelate_by_ns(data_raw,wavelet, sc,ec,dt=0.01, SIGN=True, N_CORE=10):
    """Parallel wrapper around correlate_by_fft_2d, splitting along stations.

    ``data_raw`` is (ns, nt); ``wavelet`` is the single shared wavelet
    forwarded unchanged to every worker. Returns a float32 (ns, nt_coor)
    correlation array for the lag window [sc, ec) seconds.
    """
    from multiprocessing.pool import Pool

    n_stat, n_samp = data_raw.shape
    lo = n_samp - 1 + int(sc / dt)
    hi = n_samp - 1 + int(ec / dt)
    n_lag = hi - lo

    t_start = time.time()
    print('multiprocesses start')
    # Two jobs per core; each job takes a contiguous band of stations.
    n_job = N_CORE * 2
    chunk = n_stat // n_job + 1
    bounds = [(j * chunk, min(n_stat, (j + 1) * chunk)) for j in range(n_job)]
    job_args = [[data_raw[a:b, :], wavelet, sc, ec, dt, SIGN] for a, b in bounds]
    with Pool(N_CORE) as pool:
        pieces = pool.starmap(correlate_by_fft_2d, job_args)
    # Reassemble the per-job bands in station order.
    out = np.zeros([n_stat, n_lag], dtype='float32')
    for (a, b), piece in zip(bounds, pieces):
        out[a:b, :] = piece
    print('multiprocesses end')
    t_end = time.time()
    print('take time :{:2.2f}s'.format(t_end - t_start))

    return out

def load_correlate(BASE_STAT,stats, **paras):
    """Load a correlation volume previously written by ``save_correlate``.

    Parameters
    ----------
    BASE_STAT : str
        Base-station name; must match the value stored in the file.
    stats : list of str
        Station names; only their count is validated against the file.
    **paras
        Optional keys: CHN (channel, default 'Z'), dt (sample interval,
        default 0.01), LOAD_ALL (also return per-day bookkeeping,
        default False), add_keys (iterable of extra dataset names whose
        contents are appended to the returned list), file (explicit path
        overriding the default name).

    Returns
    -------
    list
        [data_correlate, t_valid, mask], extended with
        [N_each_day, days_strings] when LOAD_ALL, then one array per
        entry of add_keys.

    Raises
    ------
    ValueError
        If the file's station count, base station, or dt disagree with
        the arguments.
    """
    CHN='Z'  if paras.get('CHN') is None else paras.get('CHN')
    dt = 0.01 if paras.get('dt') is None else paras.get('dt')
    LOAD_ALL = False if paras.get('LOAD_ALL') is None else paras.get('LOAD_ALL')
    add_keys = paras.get('add_keys')
    ns = len(stats)

    # Default filename mirrors the naming convention used by save_correlate.
    if  paras.get('file') is None:
        filename = './data/COOR_{}_C{}_S{:02d}.h5'.format(BASE_STAT, CHN,ns)
    else:
        filename = paras.get('file')
    
    print('loading file')
    
    with h5py.File(filename,'r') as f:
        stats_f = f['stats']
        BASE_STAT_f = f['BASE_STAT'][()]
        t_valid = f['t_valid'][()]
        dt_f = f['dt'][()]

        # Validate file metadata against the caller's expectations.
        # NOTE(review): h5py 3.x returns bytes for string scalars, so
        # BASE_STAT_f == BASE_STAT may compare bytes to str and always
        # fail — confirm the h5py version in use / whether a .decode()
        # is needed here.
        if len(stats_f)==ns and BASE_STAT_f == BASE_STAT and dt==dt_f:
            data_coor = f['data_correlate'][:]
            # print(stats_f[:])
        else:
            raise ValueError('{} not match with settings'.format(filename))
        mask = f['D_mask'][()]

        if LOAD_ALL:
            # Per-day bookkeeping: event counts and day labels (stored as bytes).
            N_each_day=f['N_each_day'][:]
            Days=f['Days'][:]
            days_strings = []
            for i in Days:
                days_strings.append(i.decode('utf-8'))
            assert len(N_each_day)==len(days_strings)
            return_value = [data_coor, t_valid, mask,N_each_day,days_strings]
        else:
            return_value = [data_coor, t_valid, mask]
        
        # Append any extra requested datasets, in the order given.
        if add_keys is not None:
            for key in add_keys:
                return_value.append(f[key][:])
        
    return return_value

def save_correlate(data_correlate, BASE_STAT,stats,t_valid, ROOT='./data/',SAVE=False, **paras):
    """Write the correlation volume plus its metadata to an HDF5 file.

    ``data_correlate`` is (ne, ns, nt); ``t_valid`` is the lag-time axis
    whose spacing must equal ``dt``. Nothing is written unless
    SAVE=True. Optional **paras: CHN, dt, D_mask, N_each_day, Days,
    file (explicit output path), add_inf (dict of extra datasets).
    """
    CHN = paras.get('CHN') if paras.get('CHN') is not None else 'Z'
    dt = paras.get('dt') if paras.get('dt') is not None else 0.01
    D_mask = paras.get('D_mask') if paras.get('D_mask') is not None else 0
    N_each_day = paras.get('N_each_day') if paras.get('N_each_day') is not None else [0]
    Days = paras.get('Days') if paras.get('Days') is not None else ['0']

    ne, ns, nt = data_correlate.shape
    # The declared sample interval must match the time-axis spacing.
    assert np.abs(dt - (t_valid[1] - t_valid[0])) < 1e-8

    filename = paras.get('file')
    if filename is None:
        filename = ROOT + 'COOR_{}_C{}_S{:02d}.h5'.format(BASE_STAT, CHN, ns)

    if not SAVE:
        return

    if os.path.exists(filename):
        # Never clobber an existing file; write a sibling instead.
        filename = filename + '_new.h5'
    with h5py.File(filename, 'w') as f:
        f.create_dataset('stats', data=np.array(stats, dtype='S'))
        f.create_dataset('data_correlate', data=data_correlate)
        f.create_dataset('BASE_STAT', data=BASE_STAT)
        f.create_dataset('t_valid', data=t_valid)
        f.create_dataset('dt', data=dt)
        f.create_dataset('D_mask', data=D_mask)
        f.create_dataset('N_each_day', data=np.array(N_each_day))
        f.create_dataset('Days', data=np.array(Days, dtype='S'))
        if paras.get('add_inf') is not None:
            # Arbitrary extra datasets supplied by the caller.
            for key, value in paras['add_inf'].items():
                f.create_dataset(key, data=value)
    print("save as {}".format(filename))
