# pre packages
from myglobal import os, sys, LINES


# sys packages
from pylab import np
import obspy as ob
from glob import glob
from tqdm import tqdm
from obspy.core.utcdatetime import UTCDateTime
import pandas as pd
from scipy import signal


# self packages
from utils.loc import load_loc, get_distance,sort_data_by_distance
from utils.math import norm, my_vel_regress
from utils.h5data import get_event_data, save_h5_data, read_h5_data,h5glob
from utils.hsr import RawDataBase, search_events
from utils.plot import plot_traces_by_subfigures, plot_events,plot_raw_data
from utils.trace import get_tapered_slices, safe_filter, get_tapered_traces, whiten_msnoise
from corr4py.corr import corr_with_wavelets

# cmd
import argparse
# Command-line interface. Every option has a default so the script can run
# unattended; `-fs`/`-fe` bound the bandpass applied before stacking.
parser = argparse.ArgumentParser()
parser.add_argument('-debug', action='store_true', help='method: DEBUG')
parser.add_argument('-base', default='DZ155', help='BASE_STAT, default  is DZ155')
parser.add_argument('-line', default='VFHF', help='LINE_ID, default is VFHF')
parser.add_argument('-emarker', default='', help='marker in previous processes')
parser.add_argument('-fs', default=9.0, type=float, help='start frequency before stacking, unit:Hz, type: float')
# Fixed help text: this is the stop *frequency*, not a time (unit is Hz).
parser.add_argument('-fe', default=11.0,type=float, help='stop frequency before stacking, unit:Hz, type: float')
parser.add_argument('-PBH', action='store_true', help='method: 进行谱白化')
parser.add_argument('-date', default='2303',  help='which data to use, 2303, 2406, 2409, 254C')
parser.add_argument('-input', default='',  help='input corr h5file')
parser.add_argument('-output', default='',  help='output stack h5 file')
parser.add_argument('-eventfile', default='',  help='event file for selecting events')
parser.add_argument('-figroot', default='figures/5.stack.figures',  help='root to save figs')

args = parser.parse_args()
print(args)

# Unpack CLI options into module-level configuration names.
DEBUG = args.debug
LINE_ID = args.line
BASE_STAT = args.base
EMARKER = args.emarker
PBH = args.PBH          # enable spectral whitening before stacking
DATE = args.date
DATA_FILE = args.input
OUTFILE = args.output
EVENT_FILE = args.eventfile
FIG_ROOT = args.figroot
fs = args.fs            # bandpass low corner, Hz
fe = args.fe            # bandpass high corner, Hz

# LINES (from myglobal) maps a campaign date to its named receiver lines.
# A LINE_ID that is not a named line is treated as a single station name.
LINES = LINES[DATE]
CORR_STATS = LINES.get(LINE_ID, [LINE_ID])

# Select the data root and station-location table for the requested campaign.
if DATE == '2303':
    H5_ROOT = 'data/2303.HSR'
    s_info = load_loc('./loc/loc_all_from_log_2303.csv')
elif DATE == '254C':
    H5_ROOT = 'data/254C.HSR'
    s_info = load_loc('./loc/254C.csv', lat_key='lat', lon_key='lon')
else:
    # '2406'/'2409' and unknown dates previously fell through silently and
    # crashed later with a NameError on s_info/H5_ROOT; fail fast instead.
    raise ValueError(f'unsupported -date {DATE!r}: no H5_ROOT/location table configured')

FIG_ROOT = f'{FIG_ROOT}/{EMARKER}'
# makedirs + exist_ok: FIG_ROOT is a nested path, so plain os.mkdir would
# fail if the parent is missing, and the exists() check was race-prone.
os.makedirs(FIG_ROOT, exist_ok=True)

# Tag output artifacts with line id and band, e.g. 'VFHF.F9.0.11.0[.PBH]'.
MARKER = f'{LINE_ID}.F{fs:02.1f}.{fe:02.1f}'
if PBH:
    MARKER = f'{MARKER}.PBH'
if not OUTFILE:
    # Default output location under the campaign's H5 root.
    STACK_ROOT = f'{H5_ROOT}/traceSeq/{EMARKER}'
    # makedirs + exist_ok replaces the race-prone exists()/mkdir pair and
    # also creates missing parent directories.
    os.makedirs(STACK_ROOT, exist_ok=True)
    OUTFILE = f'{STACK_ROOT}/stacked.{EMARKER}.{MARKER}.h5'


VLIM = [300, 5000]   # velocity axis limits for the debug plot, m/s
v_estimate = 2900    # reference velocity overlaid on the debug plot, m/s

# Source-receiver geometry: distance of every receiver from the base
# station, then receivers re-ordered by increasing offset.
offsets = []
for stat_name in CORR_STATS:
    dist, azimuth, _ = get_distance(s_info=s_info, name1=BASE_STAT, name2=stat_name, S2N=False)
    projected = dist * abs(np.cos(np.pi * (azimuth - 55) / 180))
    print(stat_name, dist, projected, dist - projected)
    offsets.append(dist)

x, CORR_STATS = sort_data_by_distance(offsets, NAMES=CORR_STATS)
print(x)
print(CORR_STATS)
x = np.array(x)
nx = len(x)  # number of receivers in the line

# Build the sorted list of event group names to stack: either from an
# explicit event-selection CSV, or from the 'all_groups' index stored in
# the correlation h5 file.
if EVENT_FILE:
    # read_csv already returns a DataFrame; the old pd.DataFrame(...) wrapper
    # was redundant.
    event_select = pd.read_csv(EVENT_FILE)
    groups = list(event_select['group'].values)
    print(f'find {len(groups)} groups in {EVENT_FILE}')
else:
    groups = read_h5_data(DATA_FILE, keys=['all_groups'])[0]
    groups = [g.decode() for g in groups]  # h5py stores names as bytes
    print(f'find {len(groups)} groups in {DATA_FILE}')
groups.sort()

IT = tqdm(enumerate(groups), desc='stack bar', total=len(groups))

# Main stacking loop: for each event group read its cross-correlation
# gather, band-limit every receiver trace, and linearly stack into
# stacked_traces (one row per receiver in CORR_STATS order).
for i,group_i in IT:
    print(i, group_i)
    # Per-group datasets: corr gather, time axis, event time, receiver
    # names, and the source (base) station name.
    data_i, t, e_time, stats, SOURCE= read_h5_data(DATA_FILE, keys=['corr','t','te','R','S'], group_name=group_i)
    # h5py returns bytes; decode to str. (The comprehension's 'i' does not
    # leak into the outer loop index in Python 3, but the shadowing is easy
    # to misread.)
    stats = [i.decode() for i in stats]
    SOURCE = SOURCE.decode()
    e_time = e_time.decode()

    nt = len(t)
    # Sample interval in seconds — assumes a uniform time axis; TODO confirm.
    dt = t[1]-t[0]
    # Every group must have been correlated against the same base station.
    assert SOURCE == BASE_STAT

    if i==0:
        # Allocate the stack buffer on the first group; nt is assumed
        # constant across groups — verify upstream guarantees this.
        stacked_traces = np.zeros([nx,nt],dtype='float32')

    for j,name in enumerate(CORR_STATS):
        idx_j = stats.index(name)
        trace_j = data_i[idx_j,:]
        # Skip dead channels (all-zero traces) so they don't bias the stack.
        if np.sum(np.abs(trace_j))==0:
            print(i, j,name, 'no signal, E=',np.sum(np.abs(trace_j)))
            continue

        # Processing chain (order matters): detrend -> optional spectral
        # whitening in [fs, fe] -> taper -> zero-phase bandpass -> normalize.
        trace_j = signal.detrend(trace_j)
        # trace_j = get_tapered_traces(trace_j, dt, L_Taper=1, WIN_TYPE='hann')
        if PBH:
            trace_j = whiten_msnoise(trace_j, dt, fs, fe, FL=1)
        trace_j = get_tapered_traces(trace_j, dt, L_Taper=1, WIN_TYPE='hann')

        trace_j = safe_filter(trace_j, dt, ftype='bandpass', zerophase=True, freqmin=fs, freqmax=fe)
        # trace_j = get_tapered_traces(trace_j, dt, L_Taper=1, WIN_TYPE='hann')

        # Equal-weight linear stack of unit-normalized traces.
        trace_j = norm(trace_j)
        stacked_traces[j,:] += trace_j

# Normalize each stacked trace to unit amplitude after summation.
for row_idx in range(stacked_traces.shape[0]):
    stacked_traces[row_idx, :] = norm(stacked_traces[row_idx, :])

if DEBUG:
    # Quick-look figure of the stacked section (wiggle overlay, band-limited).
    fig, ax1, ax2 = plot_raw_data(stacked_traces, x, t, fs=fs, fe=fe, VLIM=VLIM, PLOT_WIGGLE=True, SCALE=60, FV_NORM=True)

    # Label every trace with its receiver name at its offset.
    for j in range(nx):
        ax1.text(-1, x[j], CORR_STATS[j], fontsize=6)

    # Reference move-out line at the assumed velocity.
    ax1.plot([-5, 5], [-5 * v_estimate, 5 * v_estimate])
    ax1.set_title(f'{EMARKER} {MARKER}')
    fig.tight_layout()
    # The original relied on the loop variable 'i' leaked from the stacking
    # loop; use the index of the last group explicitly (same filename).
    figname = f'{FIG_ROOT}/stack.{EMARKER}.{MARKER}.{len(groups) - 1:03d}.png'
    print(figname)
    fig.savefig(figname, dpi=300)

# Persist results: write the metadata group first (mode='w' truncates any
# previous file), then append the stacked section and its axes.
save_h5_data(
    file_name=OUTFILE,
    attrs_dict=vars(args),
    group_name='metadata',
    data_in_dict={'MARKER': MARKER},
    mode='w',
)

output_payload = {
    'stacked': stacked_traces,                  # (nx, nt) stacked section
    't': t,                                     # lag-time axis
    'x': x,                                     # receiver offsets
    'S': BASE_STAT,                             # source (base) station
    'R': np.array(CORR_STATS, dtype='S'),       # receiver names as bytes
    'NE': len(groups),                          # number of stacked events
    'all_groups': np.array(groups, dtype='S'),
}

save_h5_data(OUTFILE, output_payload, mode='a')
print(OUTFILE)

