# pre packages
from myglobal import os, sys, LINES



# sys packages
from pylab import np
import obspy as ob
from glob import glob
from tqdm import tqdm
from obspy.core.utcdatetime import UTCDateTime
import pandas as pd
from scipy.stats import linregress


# self packages
from utils.loc import load_loc, get_distance,sort_data_by_distance
from utils.math import norm, my_vel_regress
from utils.h5data import get_event_data, save_h5_data, read_h5_data
from utils.hsr import RawDataBase, search_events
from utils.plot import plot_traces
from utils.trace import get_tapered_slices, safe_filter, whiten_msnoise,get_tapered_traces
from corr4py.corr import corr_with_wavelets


# cmd
import argparse

# Command-line interface: all runtime configuration for this stacking run
# comes in through these flags.
parser = argparse.ArgumentParser()
parser.add_argument('-debug', action='store_true', help='method: DEBUG')
parser.add_argument('-mpi', action='store_true', help='method: use MPI')
parser.add_argument('-base', default='DZ155', help='BASE_STAT, default  is DZ155')
parser.add_argument('-line', default='VFHF', help='LINE_ID, default is VFHF')
parser.add_argument('-emarker', default='', help='marker in previous processes')
parser.add_argument('-fs', default=9.0, type=float, help='start frequency before stacking, unit:Hz, type: float')
# BUGFIX: help text previously said 'stop time', but -fe is the stop *frequency* (Hz).
parser.add_argument('-fe', default=11.0, type=float, help='stop frequency before stacking, unit:Hz, type: float')
parser.add_argument('-PBH', action='store_true', help='method: 进行谱白化')
parser.add_argument('-date', default='2303', help='which data to use, 2303, 2406, 2409, 254C')

parser.add_argument('-input', default='', help='input corr h5file')
parser.add_argument('-output', default='', help='output stack h5 file')
parser.add_argument('-eventfile', default='', help='event file for selecting events')
parser.add_argument('-figroot', default='figures/6.traceSeq.figures', help='root to save figs')

parser.add_argument('-tshift', default=0.0, type=float, help='shift time in stacking, unit:s, type: float')
args = parser.parse_args()
print(args)

# Unpack CLI arguments into module-level configuration constants.
DEBUG = args.debug
USE_MPI = args.mpi
LINE_ID = args.line
BASE_STAT = args.base
EMARKER = args.emarker
PBH = args.PBH            # spectral whitening on/off
DATE = args.date
DATA_FILE = args.input
OUTFILE = args.output
EVENT_FILE = args.eventfile
FIG_ROOT = args.figroot

# Resolve the receiver stations for this line: LINES[DATE] maps a line id to
# its station list; an unknown id is treated as a single-station "line".
LINES = LINES[DATE]
# Idiom fix: `LINE_ID in LINES.keys()` + if/else is exactly dict.get().
CORR_STATS = LINES.get(LINE_ID, [LINE_ID])

T0_SHIFT = args.tshift
fs = args.fs   # bandpass low corner (Hz)
fe = args.fe   # bandpass high corner (Hz)

# Per-dataset configuration: data root, station locations, reference start time.
if DATE == '2303':
    H5_ROOT = 'data/2303.HSR'
    s_info = load_loc('./loc/loc_all_from_log_2303.csv')
    START_TIME = '2023-03-10T00:00:00Z'
elif DATE == '254C':
    H5_ROOT = 'data/254C.HSR'
    s_info = load_loc('./loc/254C.csv', lat_key='lat', lon_key='lon')
    START_TIME = '2024-10-08T00:00:00Z'
elif DATE in ('2406', '2409'):
    # BUGFIX: these branches were silent `pass` placeholders, which left
    # H5_ROOT / s_info / START_TIME undefined and produced a confusing
    # NameError later (START_TIME is used unconditionally in the stacking
    # loop). Fail fast with a clear message instead.
    raise NotImplementedError(f'dataset {DATE} is not configured yet')
else:
    raise ValueError(f'unknown -date value: {DATE!r}')

L_CUT = 6  # length (s) of the window cut around the arrival

# Build the run marker used in output-file and figure names, e.g.
# 'VFHF.TO0.0.F9.0.11.0[.PBH]'.
MARKER = f'{LINE_ID}.TO{T0_SHIFT:.1f}.F{fs:02.1f}.{fe:02.1f}'
if PBH:
    MARKER = f'{MARKER}.PBH'

FIG_ROOT = f'{FIG_ROOT}/{EMARKER}.{MARKER}'
# BUGFIX: os.mkdir fails when the parent directory is missing (FIG_ROOT is a
# nested path) and races with concurrent runs; makedirs(..., exist_ok=True)
# handles both.
os.makedirs(FIG_ROOT, exist_ok=True)

# Default output location under the dataset's h5 root when -output not given.
if not OUTFILE:
    SEQ_ROOT = f'{H5_ROOT}/traceSeq/{EMARKER}'
    OUTFILE = f'{SEQ_ROOT}/traces.{EMARKER}.{MARKER}.h5'
    os.makedirs(SEQ_ROOT, exist_ok=True)

# Determine which event groups to stack: from an explicit event CSV when
# given, otherwise from the group list stored in the correlation h5 file.
if EVENT_FILE:
    # Idiom fix: pd.read_csv already returns a DataFrame; the extra
    # pd.DataFrame(...) wrapper was redundant.
    event_select = pd.read_csv(EVENT_FILE)
    groups = list(event_select['group'].values)
    print(f'find {len(groups)} groups in {EVENT_FILE}')
else:
    groups = read_h5_data(DATA_FILE, keys=['all_groups'])[0]
    groups = [i.decode() for i in groups]  # h5 stores bytes; convert to str
    print(f'find {len(groups)} groups in {DATA_FILE}')
groups.sort()

NE = len(groups)  # number of events
print(f'save final data to {OUTFILE}')
# Write run metadata first (mode='w' truncates any existing output file).
save_h5_data(file_name=OUTFILE,
             attrs_dict=vars(args),
             group_name='metadata',
             data_in_dict={'MARKER': MARKER}, mode='w')

ns = len(CORR_STATS)  # number of receiver stations
IT = tqdm(enumerate(groups), desc='stack bar', total=len(groups))

# Per-event processing loop: read each event's correlation gather, taper /
# (optionally) whiten / bandpass every receiver trace, cut a fixed-length
# window around the (shifted) arrival, normalise it and collect it into
# all_traces_slices[event, station, sample].
te_all = np.zeros([NE])  # event times (s) relative to START_TIME
for i,group_i in IT:
    # print(i, file_i)
    # One event group: corr data (stations x lag samples), lag axis t,
    # event time te, receiver names R and source station S.
    data_i, t, e_time, stats, SOURCE= read_h5_data(DATA_FILE, keys=['corr','t','te','R','S'], group_name=group_i)
    stats = [i.decode() for i in stats]  # h5 bytes -> str (comprehension `i` does not clobber the loop index)
    SOURCE = SOURCE.decode()
    e_time = e_time.decode()

    nt = len(t)
    dt = t[1]-t[0]  # sampling interval (s) — assumes a uniform time axis
    nt_cut = int(L_CUT/dt)
    if i==0:
        # Allocate the output array once the trace geometry is known.
        all_traces_slices = np.zeros([NE, ns, nt_cut])

    # Every group in the input file must share the same source station.
    assert SOURCE == BASE_STAT

    te_all[i]=UTCDateTime(e_time)-UTCDateTime(START_TIME)
    
    for j,name in enumerate(CORR_STATS):
        idx_j = stats.index(name)
        trace_j = data_i[idx_j,:]
        if np.sum(np.abs(trace_j))==0:
            # Dead trace: leave the zero row in all_traces_slices and move on.
            print(i, j,name, 'no signal, E=',np.sum(np.abs(trace_j)))
            continue

        # Taper, optionally spectral-whiten, taper again, then bandpass [fs, fe].
        trace_j = get_tapered_traces(trace_j, dt, L_Taper=1, WIN_TYPE='hann')
        if PBH:
            trace_j = whiten_msnoise(trace_j, dt, fs, fe, FL=1)
        trace_j = get_tapered_traces(trace_j, dt, L_Taper=1, WIN_TYPE='hann')

        trace_j = safe_filter(trace_j, dt, ftype='bandpass', zerophase=True, freqmin=fs, freqmax=fe)
        
        # Cut the valid signal — the same [t0-L_CUT/2, t0+L_CUT/2] window,
        # centred on the shifted arrival time, for every station.
        t_arrival = T0_SHIFT
        tsp = np.argmin(np.abs(t-(t_arrival-L_CUT/2)))
        tep = tsp+int(L_CUT/dt)
        
        trace_j_cut,tn = get_tapered_slices(trace_j, t, [t[tsp],t[tep]], L_Taper=0.5, WIN_TYPE='hann')
        trace_j_cut = norm(trace_j_cut)

        all_traces_slices[i,j,:] = trace_j_cut

# Reference-trace generation: linearly stack all events, then normalise each
# station's stacked trace.
ref_traces_slices = all_traces_slices.sum(axis=0)
for stat_idx in range(ns):
    ref_traces_slices[stat_idx, :] = norm(ref_traces_slices[stat_idx, :])

# Persist the stacked section alongside the metadata written earlier.
print(f'save final data to {OUTFILE}')
save_h5_data(
    OUTFILE,
    {
        'traces': all_traces_slices,            # per-event cut traces
        'te_all': te_all,                       # event times rel. START_TIME
        't': tn,                                # time axis of the cut window
        'ref': ref_traces_slices,               # stacked reference traces
        't_shift': T0_SHIFT,
        'S': BASE_STAT,
        'R': np.array(CORR_STATS, dtype='S'),
        'all_groups': np.array(groups, dtype='S'),
    },
    mode='a',
)

if DEBUG:
    # Quick-look QC figure of the stacked reference section; produced only
    # in debug runs. (Dead commented-out per-station plotting loop and the
    # unused `from pylab import plt` import were removed.)
    from utils.plot import plot_raw_data

    # Receiver offsets from the base station along the line.
    x = []
    for name in CORR_STATS:
        xi, azi, _ = get_distance(s_info=s_info, name1=BASE_STAT, name2=name, S2N=False)
        # Print offset, its projection onto the 55-degree line azimuth, and
        # the residual, as a quick geometry sanity check.
        print(name, xi, xi * abs(np.cos(np.pi * (azi - 55) / 180)), xi - xi * abs(np.cos(np.pi * (azi - 55) / 180)))
        x.append(xi)

    # BUGFIX: VLIM / v_estimate were assigned twice; keep a single definition.
    VLIM = [300, 5000]   # velocity axis limits for the plot
    v_estimate = 2900    # rough phase velocity for the guide line
    fig, ax1, ax2 = plot_raw_data(ref_traces_slices, x, tn, fs=fs, fe=fe, VLIM=VLIM, PLOT_WIGGLE=True, SCALE=60, FV_NORM=True)

    # Label each wiggle with its station name.
    for j in range(ns):
        ax1.text(-1, x[j], CORR_STATS[j], fontsize=6)

    # Guide line with slope v_estimate through the origin.
    ax1.plot([-5, 5], [-5 * v_estimate, 5 * v_estimate])
    ax1.set_title(f'{EMARKER} {MARKER}')
    fig.tight_layout()
    figname = f'{FIG_ROOT}/ref.{EMARKER}.{MARKER}.png'
    print(figname)
    fig.savefig(figname, dpi=300)
