# pre packages
from myglobal import *

# sys packages
from pylab import np
import obspy as ob
from glob import glob
from tqdm import tqdm
from obspy.core.utcdatetime import UTCDateTime
import pandas as pd
from scipy.stats import linregress
from scipy.interpolate import interp1d


# self packages
from utils.loc import load_loc, get_distance,sort_data_by_distance, get_stats_within_distance
from utils.math import norm, my_vel_regress
from corr4py.corr import corr_with_wavelets
from utils.h5data import get_event_data, save_h5_data, read_h5_data,h5py,h5glob, item_exists
from utils.hsr import get_hsr_slices_N, get_hsr_slices_S
from utils.plot import plot_traces_by_subfigures, plot_events,plot_raw_data
from utils.trace import get_tapered_slices, filtfilt

USE_D3_EVENTS = True  # use mixed-train events (north- and south-bound both present)
USE_D1_EVENTS = True  # use north-bound train events
USE_D2_EVENTS = True  # use south-bound train events
TS_WAVELET, TE_WAVELET = -10,15        # offsets (s) from the theoretical train arrival; used to drop the train-overhead interval so the train-to-line distance stays large
THREADS_NUM = 8  # worker threads passed to corr_with_wavelets
N_FIG = 30  # approximate number of DEBUG figures spread over all groups

if __name__ == '__main__':
    # Command-line interface.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('-debug', action='store_true', help='method: DEBUG')
    parser.add_argument('-mute', action='store_true', help='method: mute unvalid times')
    parser.add_argument('-raw', action='store_true', help='method: use raw data to corr')
    parser.add_argument('-base', default='DZ155', help='BASE_STAT, default  is DZ155')
    parser.add_argument('-line', default='VFHF', help='LINE_ID, default is VFHF')
    parser.add_argument('-emarker', default='', help='marker in previous processes')
    parser.add_argument('-ts', default=-20, type=int, help='start time of corr, unit:s, type: int')
    parser.add_argument('-te', default=20,type=int, help='end time of corr, unit:s, type: int')
    parser.add_argument('-fs', default=-1, type=float, help='start frequency, unit:Hz, type: float')
    parser.add_argument('-fe', default=-1,type=float, help='stop time, unit:Hz, type: float')
    parser.add_argument('-L', default=0,type=int, help='length of corr, unit:s, type: int')
    parser.add_argument('-Tpos', default='N',type=str, help='location of train,N,S, default is N')

    # Event-direction filters (per the help text: D1 = S2N, D2 = N2S, D3 = mixed).
    parser.add_argument('-noD1', action='store_true', help='method: discard S2N events')
    parser.add_argument('-noD2', action='store_true', help='method: discard N2S events')
    parser.add_argument('-noD3', action='store_true', help='method: discard mixed events')

    parser.add_argument('-date', default='',  help='which data to use, 2303, 2406, 2409, 254C')
    parser.add_argument('-input', default='',  help='input h5file')
    parser.add_argument('-output', default='',  help='output corr h5 file')
    parser.add_argument('-figroot', default='figures/4.corr.figures',  help='root to save figs')

    args = parser.parse_args()
    print(args)

    # Bind CLI options to the configuration names used throughout the script.
    DEBUG= args.debug
    MUTE = args.mute
    USE_RAW = args.raw
    LINE_ID = args.line
    BASE_STAT=args.base
    EMARKER = args.emarker
    DATA_FILE = args.input
    OUTFILE = args.output
    FIG_ROOT = args.figroot
    TPOS = args.Tpos

    # The -noD* flags override the USE_D*_EVENTS defaults defined at the top of the file.
    USE_D1_EVENTS = not args.noD1
    USE_D2_EVENTS = not args.noD2
    USE_D3_EVENTS = not args.noD3

    DATA_FOLDER = os.path.dirname(DATA_FILE)
    fs = args.fs
    fe = args.fe
    # -1 is the "unset" sentinel: only form a frequency band when both ends are given.
    # NOTE(review): FS is assigned but never referenced later in this chunk.
    FS = [fs,fe] if fs!=-1 and fe!=-1 else None

    T_LIM = [args.ts,args.te]
    # NOTE(review): TL is assigned but never referenced later in this chunk.
    TL = args.L
    # A non-zero -L overrides -ts/-te with a fixed-length window starting at 100 s.
    if args.L !=0:
        T_LIM=[100,100+args.L]

    # Read input data: run metadata, event group names, velocity info and station coordinates.
    metadata, args_infile = read_h5_data(DATA_FILE, 
            keys=['MARKER','EMARKER','DATE'], group_name='metadata',FILL_NONE=True, FILL_VALUE=None,
            read_attrs=True)
    groups = read_h5_data(DATA_FILE, keys=['all_groups'])[0]
    groups = [g.decode() for g in groups]
    ne = len(groups)
    VINFO = read_h5_data(DATA_FILE, keys=['info/VINFO'], FILL_NONE=True)[0]
    x_infile= read_h5_data(DATA_FILE, keys=['info/x'], FILL_NONE=True)[0]
    # Spacing between DEBUG figures so that roughly N_FIG figures are produced
    # over all groups. The max(1, ...) guards against a ZeroDivisionError in the
    # later `i % N_per_FIG` check when there are fewer than N_FIG groups
    # (int(len/N_FIG) was 0 in that case).
    N_per_FIG = max(1, len(groups) // N_FIG)
    print(f'find {len(groups)} groups in {DATA_FILE}')
    

    # Derive run configuration; CLI flags take precedence over values stored in
    # the input file's metadata (keys were ['MARKER','EMARKER','DATE']).
    # NOTE(review): the fallback reads metadata[0], i.e. the 'MARKER' entry,
    # not 'EMARKER' -- confirm this is intended.
    EMARKER = f'{metadata[0].decode()}' if not args.emarker else args.emarker
    DATE= metadata[2].decode() if not args.date else args.date
    AMP_HSR = args_infile['amp']  # amplitude level recorded by the previous processing step

    # Per-date configuration: station info and pre-defined receiver lines.
    date_info = get_info(DATE)
    s_info = date_info['s_info']
    LINES = date_info['LINES']
    if LINE_ID in LINES:
        CORR_STATS = LINES[LINE_ID]
    else:
        # Unknown line id: fall back to every station within +-1000 m of the base station.
        CORR_STATS = get_stats_within_distance(s_info, base=BASE_STAT,r0=-1000,r1=1000)

    # Marker encodes base station, line, time window, train position and the
    # enabled event directions (e.g. '.D123').
    MARKER = f'{BASE_STAT}.Line{LINE_ID}.T{T_LIM[0]:02d}.{T_LIM[1]:02d}.Tp.{TPOS}'
    MARKER += '.D'+'1'*USE_D1_EVENTS+'2'*USE_D2_EVENTS+'3'*USE_D3_EVENTS

    FIG_ROOT = f'{FIG_ROOT}/{DATE}.{MARKER}.{EMARKER}'
    # makedirs(exist_ok=True) also creates missing parent directories and avoids
    # the exists()/mkdir() race of the previous implementation, which failed
    # outright when the figure root's parent did not exist.
    os.makedirs(FIG_ROOT, exist_ok=True)

    OUTFILE = f'{DATA_FOLDER}/{EMARKER}.corr.{MARKER}.h5' if not OUTFILE else OUTFILE

    print('#'*100)
    print(f'{OUTFILE=}')
    print(f'{FIG_ROOT=}')
    print('#'*100)

    # Expected sampling interval of the input data (seconds).
    dt = 0.01
    # Cross-correlation lag window (seconds).
    ts, te = -10, 10

    # Signed distance of every correlation station from the base station
    # (S2N positive), then sort stations by distance.
    x = [
        get_distance(s_info=s_info, name1=BASE_STAT, name2=name, S2N=True)[0]
        for name in CORR_STATS
    ]
    x, CORR_STATS = sort_data_by_distance(x, NAMES=CORR_STATS)
    x = np.array(x)
    nx = len(x)

    # Initialise the output file (mode 'w' truncates) with run metadata; the
    # full CLI namespace is stored as attributes.
    run_meta = {
        'OUTFILE': OUTFILE,
        'INFILE': DATA_FILE,
        'MARKER': MARKER,
        'EMARKER': EMARKER,
        'DATE': DATE,
        'BASE_STAT': BASE_STAT,
    }
    save_h5_data(file_name=f'{OUTFILE}',
                 attrs_dict=vars(args),
                 group_name='metadata',
                 data_in_dict=run_meta,
                 mode='w')

    # Main loop: slice each event around the train passage, cross-correlate
    # every trace with the base-station trace, and write one group per event
    # into OUTFILE.
    IT = enumerate(groups)
    groups_out = []  # groups that passed the selection below
    VINFO_out = []   # velocity-info rows matching groups_out

    for i,group_i in IT:
        print(MARKER, i, group_i)
        data_i, t, e_time, STATS_infile= read_h5_data(DATA_FILE, keys=['data','t','te','stats'],group_name=group_i)
        # Sanity check: the sampling interval stored in the file must match dt.
        assert abs((t[1]-t[0])-dt)<1e-5
        dt = t[1]-t[0]
        # NOTE(review): the comprehension variable shadows the outer loop index
        # `i`; harmless in Python 3 since comprehensions have their own scope.
        STATS_infile = [i.decode() for i in STATS_infile]
        e_time = e_time.decode()

        # Slice the data window according to the train position (-Tpos 'N'/'S');
        # any other value falls back to a plain, tapered T_LIM cut.
        if TPOS=='N':
            traces, t, TLIM_i, USED = get_hsr_slices_N(data_i, t, CORR_STATS, BASE_STAT, L_WIN=T_LIM[1]-T_LIM[0], 
                                        v_info=VINFO[i,:], x_infile=x_infile, STATS_infile=STATS_infile,
                                        TS_WAVELET=TS_WAVELET,TE_WAVELET=TE_WAVELET,
                                        USE_D1_EVENTS=USE_D1_EVENTS,USE_D2_EVENTS=USE_D2_EVENTS,USE_D3_EVENTS=USE_D3_EVENTS)
            # Drop events rejected by the slicer or with amplitudes above 3x the
            # AMP_HSR level recorded by the previous processing step.
            if not USED or traces.max()>AMP_HSR*3:
                print(f'{group_i} is not used')
                continue
        elif TPOS=='S':
            traces, t, TLIM_i, USED = get_hsr_slices_S(data_i, t, CORR_STATS, BASE_STAT, L_WIN=T_LIM[1]-T_LIM[0], 
                                        v_info=VINFO[i,:], x_infile=x_infile, STATS_infile=STATS_infile,
                                        TS_WAVELET=TS_WAVELET,TE_WAVELET=TE_WAVELET,
                                        USE_D1_EVENTS=USE_D1_EVENTS,USE_D2_EVENTS=USE_D2_EVENTS,USE_D3_EVENTS=USE_D3_EVENTS
                                        )
            if not USED or traces.max()>AMP_HSR*3:
                print(f'{group_i} is not used')
                continue
        else:
            # Reorder the raw traces into the correlation-station order.
            traces = []
            for j,name in enumerate(CORR_STATS):
                trace_j = data_i[STATS_infile.index(name),:]
                traces.append(trace_j)
            # Cut directly with T_LIM (tapered edges).
            traces,t = get_tapered_slices(np.array(traces), t, T_LIM, L_Taper=1)
            USED = True

        groups_out.append(group_i)
        VINFO_out.append(VINFO[i,:])

        # Cross-correlate every trace against the base-station trace over the
        # lag range [ts, te] seconds.
        nt = len(t)
        idx_BASE = CORR_STATS.index(BASE_STAT)
        corr = corr_with_wavelets(traces.reshape([1,nx,nt]),traces[idx_BASE,:].reshape([1,nt]), 
                                sp=int(ts/dt), ep=int(te/dt),
                                THREADS_NUM=THREADS_NUM)

        corr = corr.reshape([nx,-1])

        t_corr = np.arange(int(ts/dt),int(te/dt))*dt
        
        # One output group per event: correlations, lag axis, receiver names,
        # source (base) station and event time.
        data_dict = {
                'corr':corr,
                't':t_corr,
                'R':np.array(CORR_STATS,dtype='S'),
                'S':BASE_STAT,
                'te':e_time
            }
        save_h5_data(OUTFILE, data_in_dict=data_dict, group_name=f'{group_i}',mode='a')
        
        # Diagnostic figure every N_per_FIG events (DEBUG mode only).
        # NOTE(review): TLIM_i is only assigned in the 'N'/'S' branches above;
        # with any other -Tpos this plotting code would raise NameError.
        if DEBUG and i%N_per_FIG==0:
            # Assumes each VINFO row is [?, v, a, e] for the positive direction
            # followed by [v, a, e] for the negative one -- TODO confirm.
            vinfo_p = VINFO[i,1:4]
            vinfo_n = VINFO[i,4:]
            X_shift = x_infile[STATS_infile.index(BASE_STAT)]
            from pylab import figure, GridSpec, plt
            plt.close('all')
            fig = figure(figsize=(8, 8), dpi=300)
            gs = GridSpec(2,2)
            ax1 = fig.add_subplot(gs[0,:])
            ax2 = fig.add_subplot(gs[1,0])
            ax3 = fig.add_subplot(gs[1,1])
            # Wiggle plots get unreadable with many traces; fall back to image mode.
            PLOT_WIGGLE=False if nx>30 else True

            fig, _ = plot_events(traces, x, t,fig=fig, ax=ax1, 
                                DO_FILTER=False, PLOT_WIGGLE=PLOT_WIGGLE, NORM=False, SCALE=300)
            # Overlay straight train-trajectory lines x = v*(t + a) - X_shift.
            for v,a,e in [vinfo_p,vinfo_n]:
                ax1.plot([-100,100],[v*(-100+a)-X_shift,v*(100+a)-X_shift], lw=1, color='blue')
            ax1.set(
                    xlim=[TLIM_i[0]-15,TLIM_i[1]+15],
                    ylim=[x.min()-300,x.max()+300],
                    title=f'v={vinfo_p[0]:.1f},{vinfo_n[0]:.1f}')
            if not PLOT_WIGGLE:
                plt.clim([-traces.var()**0.5,traces.var()**0.5])
            print(corr.shape, t_corr.shape)
            # Trace-wise normalisation for display only (the corr saved above is
            # unnormalised).
            corr = norm(corr,ONE_AXIS=True)
            fig, _,_ = plot_raw_data(corr, x, t_corr, fig=fig, ax1=ax2,ax2 = ax3,
                                    fs=1, fe=40, VLIM=[-4000,4000], PLOT_WIGGLE=PLOT_WIGGLE, SCALE=300, FV_NORM=False)
            ax2.set_title('b) corr')
            ax3.set_title('c) FV of b')
            ax3.collections[-1].set_clim([0,0.3])
            fig.tight_layout()
            fig.savefig(f'{FIG_ROOT}/{i:04d}.corr.{MARKER}.{e_time}.png')
            
    # Append the run-level summary: accepted group names, their velocity info,
    # and the distance-sorted station list.
    summary = {
        'all_groups': np.array(groups_out, dtype='S'),
        'vinfo': np.array(VINFO_out),
        'stats': np.array(CORR_STATS, dtype='S'),
    }
    save_h5_data(OUTFILE, summary, mode='a')
    print(OUTFILE, ' saved.')

