# pre packages
from myglobal import *

# sys packages
from pylab import np
import obspy as ob
from glob import glob
from tqdm import tqdm
from obspy.core.utcdatetime import UTCDateTime
import pandas as pd
from scipy.stats import linregress
from scipy.interpolate import interp1d
import time


# self packages
from utils.loc import load_loc, get_distance,sort_data_by_distance
from utils.math import norm, my_vel_regress
from utils.fv import do_fv
from corr4py.corr import corr_with_wavelets
from utils.h5data import get_event_data, save_h5_data, read_h5_data,h5py,h5glob, item_exists
from utils.hsr import RawDataBase, search_events
from utils.plot import plot_traces_by_subfigures, plot_events,plot_raw_data
from utils.trace import get_tapered_slices,filtfilt,whiten_msnoise

def stack_along_train_pos(data_infile, STATS_infile, t,x,
                          VINFO,vs, VG=3000,fs=5,fe=7,WIN_L=8):
    """Stack velocity-scan energy along the predicted moving-train position.

    For every channel i, a time window of length ``WIN_L`` seconds is
    predicted from the train move-out model ``x = (t + a) * v``; traces from
    channels within 2 km are aligned with a group-velocity ``VG`` move-out
    and the windowed gather is scanned over the trial velocities ``vs``
    via ``do_fv``.

    WARNING: ``data_infile`` is muted IN PLACE (the direct HSR arrival is
    zeroed); pass a copy if the caller needs the original array.

    Parameters
    ----------
    data_infile : ndarray, shape (nx, nt)
        Channel-by-time gather. Modified in place (HSR arrival muted).
    STATS_infile : list
        Station ids; kept for interface compatibility (unused here).
    t : ndarray, shape (nt,)
        Uniformly sampled time axis [s].
    x : ndarray, shape (nx,)
        Channel positions along the line [m].
    VINFO : sequence (v, a, e)
        Train move-out model ``x = (t + a) * v``; ``e`` is unused here.
    vs : array-like, shape (nv,)
        Trial phase velocities [m/s].
    VG : float
        Group velocity used to align neighbouring channels [m/s].
    fs, fe : float
        Band edges forwarded to ``do_fv`` [Hz].
    WIN_L : float
        Analysis window length [s].

    Returns
    -------
    E : ndarray, shape (nx, nv)
        Stacked velocity-spectrum energy per channel.
    """
    nx,nt = data_infile.shape
    nv = len(vs)
    E = np.zeros([nx,nv])
    tmin = t[0]
    dt = t[1]-t[0]

    LP = int(WIN_L/dt)  # window length in samples

    v,a,e = VINFO  # move-out model: x = (t + a) * v

    # Mute the direct HSR arrival: zero from 1 s before to 9 s after the
    # predicted pass time at each channel.  NOTE: mutates the input array.
    for i in range(nx):
        ti = x[i]/v-a
        event_ts = max(0, int((ti-tmin-1)/dt))
        event_te = min(nt,int((ti-tmin+9)/dt))
        data_infile[i,event_ts:event_te] = 0

    # Reusable aligned-gather buffer, hoisted out of the loop so an
    # (nx, nt) array is not reallocated on every channel (only re-zeroed).
    data_i = np.zeros([nx,nt])

    for i in tqdm(range(nx),total=nx):
        xi = x[i]
        # Window start chosen so the predicted arrival sits near the
        # centre of the WIN_L-second window.
        ti = xi/v-a - WIN_L/2+1

        sp = max(0,  int((ti-tmin)/dt))
        ep = min(nt, sp+LP)
        data_i.fill(0)
        # Starts at 1 (not 0): keeps the original normalisation, which
        # divides by (number of contributing channels + 1).
        N_valid = 1
        for k in range(nx):
            xk = x[k]
            if abs(xi-xk)>2000:  # only stack channels within 2 km
                continue
            N_valid+=1

            # Shift channel k's window by the group-velocity move-out.
            tik = ti+abs(xk-xi)/VG
            tsp = max(0, int((tik-tmin)/dt))
            tep = min(nt,tsp+LP)

            data_i[k,tsp:tep] = data_infile[k,tsp:tep]

            # Grow the valid time span to cover all shifted windows.
            sp = min(sp, tsp)
            ep = max(ep, tep)

        # Velocity scan restricted to the valid time span.
        FV,_,_ = do_fv(data_i[:,sp:ep],vs,x=np.abs(x),dt=dt,
                       NORM=False,fs=fs,fe=fe)
        E[i,:] = np.abs(FV).sum(axis=0)/N_valid

    return E

# ---------------------------------------------------------------- CLI ----
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-debug', action='store_true', help='method: DEBUG')
parser.add_argument('-mute', action='store_true', help='method: mute unvalid times')
parser.add_argument('-raw', action='store_true', help='method: use raw data to corr')
parser.add_argument('-base', default='DZ155', help='BASE_STAT, default  is DZ155')
parser.add_argument('-line', default='VFHF', help='LINE_ID, default is VFHF')
parser.add_argument('-emarker', default='', help='marker in previous processes')
parser.add_argument('-ts', type=int, default=-20, help='start time of corr, unit:s, type: int')
parser.add_argument('-te', type=int, default=20, help='end time of corr, unit:s, type: int')
parser.add_argument('-date', default='',  help='which data to use, 2303, 2406, 2409, 254C')
parser.add_argument('-input', default='',  help='input h5file')
parser.add_argument('-output', default='',  help='output corr h5 file')
parser.add_argument('-figroot', default='figures/debug/3.site',  help='root to save figs')

args = parser.parse_args()
print(args)

# Unpack the parsed options into the module-level names used below.
DEBUG = args.debug
MUTE = args.mute
USE_RAW = args.raw
BASE_STAT = args.base
LINE_ID = args.line
EMARKER = args.emarker
DATA_FILE = args.input
OUTFILE = args.output
FIG_ROOT = args.figroot
T_LIM = [args.ts, args.te]  # correlation time window [s]

# Pull run metadata from the input HDF5 file; explicit CLI flags win over
# the values stored in the file.
metadata = read_h5_data(DATA_FILE, 
        keys=['MARKER','EMARKER','DATE'], group_name='metadata')
EMARKER = args.emarker if args.emarker else f'{metadata[1].decode()}.{metadata[0].decode()}'
DATE = args.date if args.date else metadata[2].decode()

# Acquisition-date configuration (get_info is provided by myglobal).
date_info = get_info(DATE)
s_info = date_info['s_info']

# Event group names, decoded from bytes and sorted alphabetically.
groups = sorted(g.decode() for g in read_h5_data(DATA_FILE, keys=['all_groups'])[0])

ne = len(groups)
print(f'find {ne} groups in {DATA_FILE}')

# Line-level auxiliary arrays stored in the 'info' group.
(health_data, amp_data, t_health,
 STATS_infile, VINFO, x_infile) = read_h5_data(
    DATA_FILE, keys=['HEALTH','AMP', 't_health','stats','VINFO','x'], group_name='info')
STATS_infile = [s.decode() for s in STATS_infile]

# NOTE: writing results to OUTFILE (save_h5_data of the metadata group and
# the 'info' group: all_groups/stats/x/VINFO) is currently disabled here.

IT = enumerate(groups)


dt = 1e-2  # nominal sampling interval [s], checked per group in the loop

# ------------------------------------------------------------ main loop --
for i,group_i in IT:
    # NOTE(review): the first 100 groups are skipped — looks like a debug
    # leftover; confirm before production runs.
    if i <100:
        continue
    data_i, t, e_time, STATS_infile= read_h5_data(DATA_FILE, keys=['data','t','te','stats'],group_name=group_i)
    # Sampling interval must match the nominal value within tolerance.
    assert abs((t[1]-t[0])-dt)<1e-5
    dt = t[1]-t[0]
    STATS_infile = [s.decode() for s in STATS_infile]
    e_time = e_time.decode()

    # VINFO[:,0] is a per-event code: 0/3 -> unusable, 1 -> use columns
    # 1..3 as (v, a, e), 2 -> use columns 4..6.
    if VINFO[i,0]==0 or VINFO[i,0]==3:
        print(f' skip {group_i} because of VINFO[0]={VINFO[i,0]}')
        continue
    elif VINFO[i,0]==1:
        vinfo_i = [VINFO[i,1],VINFO[i,2],VINFO[i,3]]
    elif VINFO[i,0]==2:
        vinfo_i = [VINFO[i,4],VINFO[i,5],VINFO[i,6]]
    else:
        # FIX: an unexpected code previously fell through without setting
        # vinfo_i, silently reusing the stale value from an earlier event.
        print(f' skip {group_i} because of unexpected VINFO[0]={VINFO[i,0]}')
        continue

    print(group_i, vinfo_i)
    # Sweep 1-Hz-wide bands: 1-2 Hz up to 19-20 Hz.
    for fs in range(1,20):
        fe = fs+1
        VS = np.linspace(200,4000,400)  # trial phase velocities [m/s]
        VG= 50000000                    # huge VG: effectively no move-out shift
        WIN_L=8                         # analysis window length [s]

        data_if = filtfilt(data_i, dt, [fs,fe], N_CORE=None, type='bandpass', BAR=False,order=4)

        # NOTE: stack_along_train_pos mutes data_if in place, so the debug
        # plot below shows the muted gather.
        E = stack_along_train_pos(data_if, STATS_infile, t,x_infile,
                                            vinfo_i,VS, VG=VG,fs=fs,fe=fe,WIN_L=WIN_L)

        if DEBUG:
            from pylab import figure, plt,pcolormesh,subplot
            plt.close('all')
            fig = figure(figsize=(8, 4), dpi=300)

            ax1 = subplot(121)
            fig, _ = plot_events(data_if, x_infile, t,fig=fig, ax=ax1, 
                                DO_FILTER=False, PLOT_WIGGLE=False, NORM=False, SCALE=20)

            ax1.set_title('a) raw data')
            # Overlay the predicted train trajectory x = (t + a) * v.
            ax1.plot(t, (t+vinfo_i[1])*vinfo_i[0],'r--',lw=1)
            # Clip the colour scale at +/- 3 standard deviations.
            plt.clim([-data_if.var()**0.5*3,data_if.var()**0.5*3])
            plt.colorbar()
            x,E = sort_data_by_distance(x_infile,E)
            x = np.array(x)

            X,Y = np.meshgrid(VS,x)
            ax2 = subplot(122)
            pcolormesh(X,Y,E,cmap='nipy_spectral_r', shading='nearest', rasterized=True)
            ax2.set_title('b) E vs R')
            ax2.set_xlabel('Velocity [m/s]')
            ax2.set_yticks([])
            plt.colorbar()
            plt.suptitle(f'{e_time},v={vinfo_i[0]:.1f},F{fs:02.1f}.{fe:02.1f}')
            fig.tight_layout()
            figname = f'{FIG_ROOT}/{i:04d}.debug.{e_time}.F{fs:02.1f}.{fe:02.1f}.png'
            fig.savefig(figname)