# coding: UTF-8
from pickle import LIST
from typing import List, Tuple
import obspy as ob
import numpy as np
import h5py
import glob
from datetime import datetime
from obspy.core.utcdatetime import UTCDateTime
import os
from scipy import signal
from tqdm import tqdm

from .math import norm
from .h5data import read_h5_data,save_h5_data, get_h5_keys
from .trace import array2Trace, get_tapered_slices

from .loc import sort_data_by_distance
from .math import correct_var_by_remove
# from paras import *

DT=0.01
REMOVE_MEET_EVENT=False
BLACK_LIST_TIME=[]
DEBUG = False

def search_events(data_raw:np.ndarray,t:np.ndarray, PRINT=True, mean_win=1, 
                  marker='',
                      OBS_WIN=10, STEP=0.5, STEP_EVENT=70, AMP_HSR=5e-5, MAX_AMP=0.001) -> List:
    """
    Coarse single-station screening for high-speed-rail (HSR) events.

    Scans the trace in STEP-second hops; when the local RMS amplitude
    exceeds AMP_HSR (and the surrounding window does not exceed MAX_AMP,
    which would indicate a stronger, non-HSR source), the event arrival
    is picked as the first sample whose smoothed Hilbert envelope rises
    above 20% of the window maximum.

    Parameters
    ----------
    data_raw : np.ndarray
        Filtered waveform samples.
    t : np.ndarray
        Time axis; uniform sampling is assumed (dt taken from t[1]-t[0]).
    PRINT : bool
        Print a progress line for each detection.
    mean_win : float
        Envelope smoothing window length in seconds.
    marker : str
        Label prepended to the progress printout.
    OBS_WIN : float
        Half-width (s) of the observation window around a trigger.
    STEP : float
        Scan step in seconds.
    STEP_EVENT : float
        Dead time (s) skipped after each detected event.
    AMP_HSR : float
        RMS trigger threshold (m/s).
    MAX_AMP : float
        Windows whose peak exceeds this are skipped as too strong.

    Returns
    -------
    list
        Event arrival times, in the same units as `t`.
    """
    events_time = []

    dt = t[1]-t[0]
    nt = len(t)

    # even-length moving-average filter used to smooth the envelope
    FL = int(1/dt*mean_win)//2*2
    mean_filter = np.ones([FL])

    step_pts = int(STEP/dt)  # loop-invariant scan step in samples

    tp=0
    while tp<nt:
        # sqrt(2)*RMS of the next STEP seconds (~ peak amplitude for a sinusoid)
        amp_tp = 2**0.5 * np.mean(data_raw[tp:tp+step_pts]**2)**0.5
        if  amp_tp < AMP_HSR:
            tp = tp+step_pts
            continue

        tp1 = max(0, int(tp-OBS_WIN/dt))
        tp2 = int(tp+OBS_WIN/dt)
        if tp2 >= nt:
            break

        trace = data_raw[tp1:tp2]
        trace_max = np.abs(trace).max()

        # too strong for an HSR event (e.g. earthquake or spike): jump past it
        if  trace_max > MAX_AMP:
            tp = tp+int(OBS_WIN/dt)*2
            continue

        # smoothed Hilbert envelope, normalized by its maximum
        d_h = np.abs(signal.hilbert(trace))
        npts = len(d_h)
        d_h_mean = np.zeros_like(d_h)
        d_h_mean[(FL//2):(npts-FL//2+1)] = np.convolve(d_h, mean_filter,'valid')/len(mean_filter)
        d_amp = d_h_mean/(d_h_mean.max()+1e-10)

        # arrival = first sample above 20% of the window envelope maximum
        above = np.where(d_amp>0.2)[0]
        if above.size == 0:
            # degenerate envelope (e.g. smoothing window longer than trace): skip
            tp = tp+step_pts
            continue
        arrive_loc = above.min()
        arrive_t = t[arrive_loc+tp1]

        events_time.append(arrive_t)

        if PRINT:
            print('\r {}|Detect:{:3d} Events in Time {:5.1f}|{:5.0f}, AMP:{:.6f}m/s'.format(
                            marker,len(events_time), t[tp], t.max(), trace_max),
                    end='', flush=True)

        # jump past this event before resuming the scan
        tp = arrive_loc+tp1+int(STEP_EVENT/dt)

    print('')

    return events_time


def hough_transform_detection(data, x, t_health, velocity_range=[-150,150], DEBUG=False):
    """
    Detect linear (constant-velocity) events in a binary station/time map
    with a Hough transform over the line family x = v*(t + b).

    Parameters
    ----------
    data : array of shape (n_stations, n_times)
        Binary health map; samples equal to 1 are treated as candidate
        event points.
    x : array of shape (n_stations,)
        Station positions.
    t_health : array
        Time axis for the columns of `data`.
    velocity_range : [v_min, v_max] or None
        Velocity search range; None falls back to [-5000, 5000] m/s.
    DEBUG : bool
        If True, also return the Hough space and its axes.

    Returns
    -------
    detected_events : list of (velocity, intercept, accumulator_value)
        Sorted by accumulator value, descending. Only cells that reach
        the global accumulator maximum AND are 8-neighborhood local
        maxima (ties count as maxima) are kept.
    hough_space, velocities, intercepts
        Returned in addition to `detected_events` only when DEBUG=True.
    """
    # velocity search range
    if velocity_range is None:
        v_min, v_max = -5000, 5000  # m/s
    else:
        v_min, v_max = velocity_range

    # Hough-space resolution
    n_velocities = 50
    n_intercepts = len(t_health)*3

    velocities = np.linspace(v_min, v_max, n_velocities)
    # intercept range derived from the data's time span (padded by one span
    # on each side so lines entering/leaving the record are still votable)
    t_min, t_max = t_health.min(), t_health.max()

    intercept_min = t_min-(t_max-t_min)
    intercept_max = t_max+(t_max-t_min)
    intercepts = np.linspace(intercept_min, intercept_max, n_intercepts)
    # accumulator
    hough_space = np.zeros((n_velocities, n_intercepts))

    # voting: each candidate point votes once per velocity bin
    for i in range(len(x)):
        xi = x[i]
        # candidate event samples for this station
        event_indices = np.where(data[i] == 1)[0]

        for idx in event_indices:
            ti = t_health[idx]

            for j, v in enumerate(velocities):
                # from x = v*(t + b): b = x/v - t
                b = xi/v - ti
                if b < intercept_min or b > intercept_max:
                    continue

                # vote for the nearest intercept bin
                b_idx = np.argmin(np.abs(intercepts - b))
                if 0 <= b_idx < n_intercepts:
                    hough_space[j, b_idx] += 1

    # keep only cells that reach the global accumulator maximum
    threshold_mask = hough_space >= hough_space.max()

    pp = np.where(threshold_mask)
    detected_events = []
    for i, j in zip(pp[0], pp[1]):
        # 8-neighborhood strict local-maximum test (equal neighbors allowed)
        is_local_max = True
        for di in [-1, 0, 1]:
            for dj in [-1, 0, 1]:
                if di == 0 and dj == 0:
                    continue
                ni, nj = i + di, j + dj
                if 0 <= ni < n_velocities and 0 <= nj < n_intercepts:
                    if hough_space[ni, nj] > hough_space[i, j]:
                        is_local_max = False
                        break
            if not is_local_max:
                break
        if is_local_max:
            detected_events.append((velocities[i], intercepts[j], hough_space[i, j]))
    # strongest events first
    detected_events.sort(key=lambda e: e[2], reverse=True)

    if DEBUG:
        return detected_events, hough_space, velocities, intercepts
    else:
        return detected_events


def hsr_vel_detection(data, x, t_health, velocity_range=[60,110], DEBUG=False, EMIN=None):
    '''
    Greedy line-fitting event detection on a binary station/time health
    map: repeatedly picks a base point, collects all points whose apparent
    velocity to the base lies in `velocity_range`, robust-fits a line
    x(t) = v*t + x0, and marks the supporting points as consumed.

    Parameters
    ----------
    data : array (n_stations, n_times)
        Candidate event amplitude map; samples equal to 1 are event points.
    x : station positions, one per row of `data`.
    t_health : time axis of the health map.
    velocity_range : [VMIN, VMAX] acceptable apparent velocities.
    DEBUG : accepted for interface symmetry; not used in this function.
    EMIN : minimum number of supporting points per event
        (defaults to n_stations / 10).

    Returns
    -------
    detected_events : list of [velocity, time_offset, n_points], sorted by
        n_points descending. The list always contains the [0, 0, 0]
        sentinel entry.
    '''
    
    # sort rows by station distance so x and data stay aligned
    x,data = sort_data_by_distance(x,data)
    x = np.array(x)

    nx,nt = data.shape
    EMIN = EMIN if EMIN is not None else nx/10
    VMIN, VMAX = velocity_range

    # flatten all candidate samples into parallel point lists:
    # xp = position, tp = time, mp = id of the event that consumed the point (0 = free)
    aidx = np.where(data==1)
    NP = len(aidx[0])
    xp = np.zeros([NP])
    tp = np.zeros([NP])
    mp = np.zeros([NP])
    for k in range(NP):
        xp[k]=x[aidx[0][k]]
        tp[k]=t_health[aidx[1][k]]
        
    # sentinel entry so callers always get a non-empty list
    detected_events = [[0,0,0]]
    if len(xp)<EMIN:
        return detected_events
    
    # try base points at the array center and both ends of the line
    x4search = [0,x.min(),x.max()]
    for xj in x4search:
        base_points_idx = np.where(xp==xj)[0]
    
        for i in base_points_idx:

            xp0 = xp[i]
            tp0 = tp[i]
            
            # apparent velocity of every free point relative to the base point
            v_est = (xp-xp0)/(tp-tp0+1e-10)
            # idx_valid = np.where(v_est>VMIN & v_est<VMAX & mp==0)[0]
            
            idx_valid = np.where(np.all([v_est>VMIN, v_est<VMAX, mp==0], axis=0))[0]
            # print(np.all([v_est>VMIN, v_est<VMAX, mp==0], axis=0))
            # print(len(aidx),len(idx_valid), VMIN,VMAX,EMIN)
            if len(idx_valid)>=EMIN:
                # NOTE(review): correct_var_by_remove appears to return
                # (fit, surviving index subset) after a 3-sigma outlier cut
                # on the velocities — confirm against .math implementation.
                A,idx_valid = correct_var_by_remove(data=v_est[idx_valid],t_data = idx_valid,sigma_CUT=3)
                # least-squares line x = v*t + x0 through the surviving points
                p_fit = np.polyfit(tp[idx_valid],xp[idx_valid],1)
                v,x0 = p_fit

                if v<VMIN or v>VMAX:
                    continue
                
                # store [velocity, time offset (-b where x = v*(t+b)), support count]
                detected_events.append([v,x0/v, len(idx_valid)])
                # mark supporting points as consumed by this event id
                mp[idx_valid] = len(detected_events)-1

            # not enough free points left for another event
            if len(np.where(mp==0)[0])<EMIN:
                break

        # compact the point lists down to the still-free points
        idx_remaining = np.where(mp==0)[0]
        xp = xp[idx_remaining]
        tp = tp[idx_remaining]
        mp = mp[idx_remaining]
        if len(mp)<EMIN:
            break
    
    # strongest (most-supported) events first
    detected_events.sort(key=lambda x: x[2], reverse=True)
    return detected_events
    
    
def get_hsr_slices_N(data_infile, t,
                   CORR_STATS, BASE_STAT, L_WIN, 
                    v_info,x_infile,STATS_infile,
                    TS_WAVELET,TE_WAVELET,
                    USE_D1_EVENTS,USE_D2_EVENTS,USE_D3_EVENTS,
                    configs=None):
    """
    Cut a fixed-length window out of the raw records while avoiding the
    train signal itself; windows with the train to the NORTH of the line
    are preferred.

    Parameters
    ----------
    data_infile : array (n_file_stations, n_samples) raw records.
    t : time axis of `data_infile`.
    CORR_STATS : station names to extract, in output row order.
    BASE_STAT : reference station name (must be in CORR_STATS).
    L_WIN : target window length in seconds.
    v_info : (direction, v1, a1, e1, v2, a2, e2) train fit; direction
        1 = S2N, 2 = N2S, 3 = both directions, anything else = none.
    x_infile, STATS_infile : positions / names for rows of `data_infile`.
    TS_WAVELET, TE_WAVELET : start/end offsets (s) of the train wavelet.
    USE_D1_EVENTS, USE_D2_EVENTS, USE_D3_EVENTS : enable windows for the
        corresponding direction class; disabling forces an empty window.
    configs : unused; kept for interface compatibility.

    Returns
    -------
    traces_out : numpy.ndarray
        Cut data, shape [len(CORR_STATS), round(L_WIN/dt)]; zero-padded
        at the end if the cut is shorter than L_WIN.
    t_out : numpy.ndarray
        Time axis of the cut, shape (round(L_WIN/dt),).
    TLIM : (t_start, t_end) of the selected window (zeros on failure).
    ok : bool, False when no usable window could be selected.
    """

    tmin, tmax = t[0], t[-1]
    nt = len(t)
    dt = t[1]-t[0]
    ns = len(CORR_STATS)
    base_idx = CORR_STATS.index(BASE_STAT)

    # gather the requested stations (file order may differ from CORR_STATS)
    traces = []
    x_raw = []
    for j,name in enumerate(CORR_STATS):
        idx_j = STATS_infile.index(name)
        trace_j = data_infile[idx_j,:]
        # trace_j = filtfilt(trace_j,dt,[9,14], order=4)
        traces.append(trace_j)
        x_raw.append(x_infile[idx_j])
    traces = np.array(traces)
    x_raw = np.array(x_raw)

    LT = L_WIN
    LT_MIN = (tmax-tmin)/3
    LTP = int(np.round(LT/dt))
    traces_out = np.zeros([ns,LTP], dtype=np.float32)
    t_out = np.zeros([LTP],dtype=np.float32)

    # train velocity fit: direction flag, then (v, a, e) per direction
    direction = v_info[0]
    v1,a1,e1 = v_info[1:4]
    v2,a2,e2 = v_info[4:]
    
    t_hsr_S2N = x_raw/(v1+1e-10)-a1 # theoretical S2N arrival times
    t_hsr_min1 = np.min(t_hsr_S2N+TS_WAVELET)
    t_hsr_max1 = np.max(t_hsr_S2N+TE_WAVELET)

    t_hsr_N2S = x_raw/(v2+1e-10)-a2 # theoretical N2S arrival times
    t_hsr_min2 = np.min(t_hsr_N2S+TS_WAVELET)
    t_hsr_max2 = np.max(t_hsr_N2S+TE_WAVELET)

    # demote to a single direction when one train's wavelet falls outside the record
    # NOTE(review): this test uses (min<tmin or max>tmax) while
    # get_hsr_slices_S uses (max<tmin or min>tmax), i.e. "partially outside"
    # vs "entirely outside" — confirm which is intended.
    # event_S2N, tmin, tmax, event_S2N
    if (t_hsr_min1<tmin or t_hsr_max1>tmax) and direction!=0:
        direction=2
    # event_N2S, tmin, tmax, event_N2S
    if (t_hsr_min2<tmin or t_hsr_max2>tmax) and direction!=0:
        direction=1

    # choose the cut window according to direction
    if direction==1: # S2N
        # only windows with the train to the north (after its passage)
        # tmin,t_hsr_min,[t_hsr_max, tmax]
        TLIM = t_hsr_max1,t_hsr_max1+LT
        if not USE_D1_EVENTS:
            TLIM=[100000,-10000000]  # deliberately empty window

    elif direction==2: # N2S
        # only windows with the train to the north (before its arrival)
        # [tmin,t_hsr_min],t_hsr_max, tmax
        TLIM = t_hsr_min2-LT,t_hsr_min2
        if not USE_D2_EVENTS:
            TLIM=[100000,-10000000]
        
        
    elif direction==3: # trains in both directions: complex case
        
        # pick a window where at least one train sits to the north
        # meeting point north of the line:
        # tmin,[t_hsr_min1,t_hsr_max1], [t_hsr_min2,t_hsr_max2], tmax   
        if t_hsr_min1<=t_hsr_min2:
            
            if t_hsr_min2-t_hsr_max1>LT*0.6:     # both trains north
                TLIM = t_hsr_max1, min(t_hsr_min2,t_hsr_max1+LT)
            elif tmax-t_hsr_max2>t_hsr_min1-tmin:# more room on the right
                TLIM = t_hsr_max2, t_hsr_max2+LT
            elif t_hsr_min1-tmin>tmax-t_hsr_max2:# more room on the left
                TLIM = t_hsr_min1-LT, t_hsr_min1
            else:
                raise ValueError('no specified direction.')
        # meeting point south of the line:
        # tmin,[t_hsr_min2,t_hsr_max2], [t_hsr_min1,t_hsr_max1], tmax        
        else:
            if tmax-t_hsr_max1>t_hsr_min2-tmin: # more room on the right
                TLIM = t_hsr_max1, t_hsr_max1+LT
            else:                               # more room on the left
                TLIM = t_hsr_min2-LT, t_hsr_min2
        if not USE_D3_EVENTS:
            TLIM=[100000,-10000000]
    else:
        
        # TLIM=[0,LT]
        TLIM=[100000,-10000000]
        print(TLIM)
        print(f'no specified direction. {TLIM=}')
    # clamp to the available record
    TLIM = max(TLIM[0], tmin),min(TLIM[1], tmax)

    # reject windows shorter than 10 s (including the deliberately empty ones)
    if TLIM[1]-TLIM[0]<10:
        # print(v1,a1,e1,direction)
        # print([t_hsr_min1,t_hsr_max1])
        # print(v2,a2,e2,direction)
        # print([t_hsr_min2,t_hsr_max2])  
        # print(f'TLIM {TLIM=} is too large and unexpected.')
        return traces_out,t_out, [t_out[0],t_out[-1]],False
    else:
        print(BASE_STAT, TLIM, 'Tpos.N')

    # cut and taper; pad with zeros if slightly short of LTP samples
    traces_cut,t_cut = get_tapered_slices(traces, t, TLIM, L_Taper=1)
    nt_cut = len(t_cut)
    if nt_cut <LTP:
        traces_out[:,:nt_cut] = traces_cut
        t_out = t_cut[0]+np.arange(LTP)*dt
    elif nt_cut ==LTP:
        traces_out = traces_cut
        t_out = t_cut
    else:
        raise ValueError(f'{TLIM=}is too large and unexpected.{nt_cut=},{LTP=} ')

    return traces_out,t_out, TLIM,True

def get_hsr_slices_S(data_infile, t,
                   CORR_STATS, BASE_STAT, L_WIN, 
                    v_info,x_infile,STATS_infile,
                    TS_WAVELET,TE_WAVELET,
                    USE_D1_EVENTS,USE_D2_EVENTS,USE_D3_EVENTS,
                    configs=None):
    """
    Cut a fixed-length window out of the raw records while avoiding the
    train signal itself; windows with the train to the SOUTH of the line
    are preferred (mirror of get_hsr_slices_N).

    Parameters
    ----------
    data_infile : array (n_file_stations, n_samples) raw records.
    t : time axis of `data_infile`.
    CORR_STATS : station names to extract, in output row order.
    BASE_STAT : reference station name (must be in CORR_STATS).
    L_WIN : target window length in seconds.
    v_info : (direction, v1, a1, e1, v2, a2, e2) train fit; direction
        1 = S2N, 2 = N2S, 3 = both directions, anything else = none.
        Internally directions 1 and 2 are swapped to mirror the N logic.
    x_infile, STATS_infile : positions / names for rows of `data_infile`.
    TS_WAVELET, TE_WAVELET : start/end offsets (s) of the train wavelet.
    USE_D1_EVENTS, USE_D2_EVENTS, USE_D3_EVENTS : enable windows for the
        corresponding direction class; disabling forces an empty window.
    configs : unused; kept for interface compatibility.

    Returns
    -------
    traces_out : numpy.ndarray
        Cut data, shape [len(CORR_STATS), round(L_WIN/dt)]; zero-padded
        at the end if the cut is shorter than L_WIN.
    t_out : numpy.ndarray
        Time axis of the cut, shape (round(L_WIN/dt),).
    TLIM : (t_start, t_end) of the selected window (zeros on failure).
    ok : bool, False when no usable window could be selected.
    """

    tmin, tmax = t[0], t[-1]
    nt = len(t)
    dt = t[1]-t[0]
    ns = len(CORR_STATS)
    base_idx = CORR_STATS.index(BASE_STAT)

    # gather the requested stations (file order may differ from CORR_STATS)
    traces = []
    x_raw = []
    for j,name in enumerate(CORR_STATS):
        idx_j = STATS_infile.index(name)
        trace_j = data_infile[idx_j,:]
        # trace_j = filtfilt(trace_j,dt,[11,15], order=4)
        traces.append(trace_j)
        x_raw.append(x_infile[idx_j])
    traces = np.array(traces)
    x_raw = np.array(x_raw)

    LT = L_WIN
    LT_MIN = (tmax-tmin)/3
    LTP = int(np.round(LT/dt))
    traces_out = np.zeros([ns,LTP], dtype=np.float32)
    t_out = np.zeros([LTP],dtype=np.float32)

    # train velocity fit: direction flag, then (v, a, e) per direction
    direction = v_info[0]
    v1,a1,e1 = v_info[1:4]
    v2,a2,e2 = v_info[4:]

    # NOTE(review): here the "1" labels are attached to N2S and "2" to S2N,
    # the mirror image of get_hsr_slices_N — intentional for the south case.
    t_hsr_S2N = x_raw/(v1+1e-10)-a1 # theoretical S2N arrival times
    t_hsr_min2 = np.min(t_hsr_S2N+TS_WAVELET)
    t_hsr_max2 = np.max(t_hsr_S2N+TE_WAVELET)

    t_hsr_N2S = x_raw/(v2+1e-10)-a2 # theoretical N2S arrival times
    t_hsr_min1 = np.min(t_hsr_N2S+TS_WAVELET)
    t_hsr_max1 = np.max(t_hsr_N2S+TE_WAVELET)
    
    # swap the direction flag so the selection logic below mirrors the N case
    if direction==1:
        direction=2
    elif direction==2:
        direction=1

    # demote to a single direction when one train's wavelet lies entirely
    # outside the record
    # event_N2S, tmin, tmax, event_N2S
    if (t_hsr_max1<tmin or t_hsr_min1>tmax) and direction!=0:
        direction=2
    # event_S2N, tmin, tmax, event_S2N
    if (t_hsr_max2<tmin or t_hsr_min2>tmax) and direction!=0:
        direction=1

    # choose the cut window according to direction
    if direction==1: # N2S
        # only windows with the train to the south (after its passage)
        # tmin,t_hsr_min,[t_hsr_max, tmax]
        TLIM = t_hsr_max1,t_hsr_max1+LT
        if not USE_D2_EVENTS:
            TLIM=[100000,-10000000]  # deliberately empty window

    elif direction==2: # S2N
        # only windows with the train to the south (before its arrival)
        # [tmin,t_hsr_min],t_hsr_max, tmax
        TLIM = t_hsr_min2-LT,t_hsr_min2
        if not USE_D1_EVENTS:
            TLIM=[100000,-10000000]
        
    elif direction==3: # trains in both directions: complex case
        
        # pick a window where at least one train sits to the south
        # meeting point presumably north of the line (original comment garbled)
        # tmin,[t_hsr_min1,t_hsr_max1], [t_hsr_min2,t_hsr_max2], tmax   
        if t_hsr_min1<=t_hsr_min2:
            
            if t_hsr_min2-t_hsr_max1>LT*0.6:     # both trains clear of the line
                TLIM = t_hsr_max1, min(t_hsr_min2,t_hsr_max1+LT)
            elif tmax-t_hsr_max2>t_hsr_min1-tmin:# more room on the right
                TLIM = t_hsr_max2, t_hsr_max2+LT
            elif t_hsr_min1-tmin>tmax-t_hsr_max2:# more room on the left
                TLIM = t_hsr_min1-LT, t_hsr_min1
            else:
                raise ValueError('no specified direction.')
        # meeting point south of the line
        # tmin,[t_hsr_min2,t_hsr_max2], [t_hsr_min1,t_hsr_max1], tmax        
        else:
            if tmax-t_hsr_max1>t_hsr_min2-tmin: # more room on the right
                TLIM = t_hsr_max1, t_hsr_max1+LT
            else:                               # more room on the left
                TLIM = t_hsr_min2-LT, t_hsr_min2

        if not USE_D3_EVENTS:
            TLIM=[100000,-10000000]
    else:
        
        TLIM=[100000,-10000000]
        print(TLIM)
        print(f'no specified direction. {TLIM=}')

    # clamp to the available record
    TLIM = max(TLIM[0], tmin),min(TLIM[1], tmax)

    # reject windows shorter than 10 s (including the deliberately empty ones)
    if TLIM[1]-TLIM[0]<10:
        # print(v1,a1,e1,direction)
        # print([t_hsr_min1,t_hsr_max1])
        # print(v2,a2,e2,direction)
        # print([t_hsr_min2,t_hsr_max2])  
        # print(f'TLIM {TLIM=} is too large and unexpected.')
        return traces_out,t_out, [t_out[0],t_out[-1]],False
    else:
        print(BASE_STAT,TLIM,'Tpos.S')

    # cut and taper; pad with zeros if slightly short of LTP samples
    traces_cut,t_cut = get_tapered_slices(traces, t, TLIM, L_Taper=1)
    nt_cut = len(t_cut)
    if nt_cut <LTP:
        traces_out[:,:nt_cut] = traces_cut
        t_out = t_cut[0]+np.arange(LTP)*dt
    elif nt_cut ==LTP:
        traces_out = traces_cut
        t_out = t_cut
    else:
        raise ValueError(f'{TLIM=}is too large and unexpected.{nt_cut=},{LTP=} ')

    return traces_out,t_out, TLIM,True


    


class Station:
    """
    One seismic station for one day.

    Locates the day-long SAC file(s), loads them via obspy, band-pass
    filters the waveform (optionally downsampling), and provides event
    slicing (`get_event_data`) and a coarse HSR event search
    (`search_events`).
    """
    def __init__(self, folder,name, day, CHNS=['Z'], fmt='{name}.*.{day}.*.{chn}.sac', 
                 LOAD_DATA=False, FS=[0.2,20], 
                 DW=1, dt=DT,
                 day_fmt='%Y.%m.%d',
                 **args) -> None:
        """
        Parameters
        ----------
        folder : directory that contains this station's SAC files.
        name : station name, substituted into `fmt`.
        day : day string, parsed with `day_fmt` into the reference date.
        CHNS : channel codes to load (first one is used for existence tests).
        fmt : glob pattern for SAC file names ({name}/{day}/{chn} fields).
        LOAD_DATA : if True, read the SAC data immediately (keeping the
            raw obspy traces).
        FS : [f_low, f_high] band-pass corner frequencies in Hz.
        DW : integer downsampling ratio (1 = no downsampling).
        dt : expected sampling interval in seconds.
        day_fmt : strptime format for `day`.
        args : stored for interface compatibility; unused here.
        """
        self.name = name
        self.folder = folder
        self.day = day
        self.date = UTCDateTime(datetime.strptime(day,day_fmt))
        self.CHNS = CHNS
        self.sac_name_fmt = fmt
        self.FS = FS
        self.DW = DW

        self.sac_raw = []       # raw obspy traces (kept only when requested)
        self.data_raw = []      # filtered waveform per channel

        self.dt = dt
        # nominal number of samples in a full day
        # NOTE(review): this is a float (3600*24/dt+1); it is replaced by
        # the real sample count once data is loaded — confirm intended.
        self.nt = 3600*24/dt+1
        self.t = np.arange(0,self.nt)*dt
        self.AMP = 0            # peak amplitude of the first channel after loading
        self.REMOVE_MEET_EVENT = REMOVE_MEET_EVENT
        self.args = args
        # glob pattern of the first channel, used for existence checks
        self.sac_name_test = os.path.join(self.folder, 
                                          self.sac_name_fmt.format(name=self.name, day=self.day, chn=self.CHNS[0]))
        

        if LOAD_DATA:
            self.load_sac_data(True)

    def file_exist(self, file_name=None)-> bool:
        """
        Check whether a usable day-long SAC file matches `file_name`
        (default: this station's first-channel pattern).

        A file counts as usable when, after merging, its duration is
        within one sample of 24 h, or short of 24 h by less than 12 h
        (a warning is printed in that case).
        """
        if file_name is None:
            file_name = self.sac_name_test
        filenames = glob.glob(file_name)

        if len(filenames)==0:
            print(file_name, 'found:', len(filenames), False)
            return False
        else: 
            # file_name = filenames[0]
            # header-only read is enough to check the duration
            d_try = ob.read(file_name, only_head=True)
            d_try = d_try.merge(fill_value=0)[0]
            # print(d_try.stats)
            dt = d_try.stats.delta
            nt = d_try.stats.npts-1
            delta = np.abs(nt*dt-3600*24)   # deviation from a full day, in seconds
            # print(delta)
            if delta<dt:
                # print(file_name, True, 'lt={:.3f}s'.format(delta))
                return True
            elif delta<3600*12:
                print(file_name, True, 'lt={:.3f}s<12H'.format(delta))
                return True
            else:
                print(file_name, False, 'lt={:.3f}s'.format(delta))
                return False

    def __read_sac(self, file_name):
        """
        Read, despike, band-pass filter and optionally downsample one SAC
        file.

        Returns (t, data_raw, sac_raw) on success, or False when the file
        is missing/unusable (callers must handle this).
        """
        # print('load SAC file {}'.format(filename))
        if not self.file_exist(file_name):
            return False
        # sac_raw = ob.read(file_name)[0]
        sac_raw = ob.read(file_name)
        sac_raw = sac_raw.merge(fill_value=0)[0]
        data = sac_raw.data
        # zero out absurd spike samples
        data[np.where(np.abs(data)>1e18)]=0
        
        dt = sac_raw.stats.delta
        nt = sac_raw.stats.npts
        # trace start time relative to this station's reference date (s)
        t_s = sac_raw.stats.starttime-self.date

        # assert 1==2
        t = np.arange(t_s, t_s+nt*dt+dt, dt)

        f = 1/dt/2   # Nyquist frequency
        fs,fe = self.FS
        b,a = signal.butter(5,[fs/f,fe/f],'bandpass')    # raw data from 1~50HZ, used for arriving time picking
        data_raw  = signal.filtfilt(b, a, data)
            
        if self.DW is not None and self.DW>1:
            # simple decimation; the band-pass above acts as the anti-alias filter
            ratio = self.DW 
            f = f/ratio
            # NOTE(review): this compares the file dt against the module DT
            # before decimation — confirm whether dt*ratio==DT was intended.
            if np.abs(dt - DT)/dt > 1e-6:
                raise ValueError('Get {}| Target {}| Rate {}| Name {}'.format(dt, DT, ratio, self.name))
            if f>fe:
                raise ValueError('in downsample, fe {}> f/DW {}'.format(fe, f))

            data_raw = data_raw[0::ratio]
            t = t[0::ratio]
            
        return t, data_raw, sac_raw

    def load_sac_data(self,KEEP_SAC=False) -> bool:
        """
        Load every channel in self.CHNS, updating t/dt/nt/AMP.

        KEEP_SAC : keep the raw obspy traces on self.sac_raw.
        Returns True on success. NOTE(review): if a channel file is
        missing, __read_sac returns False and the tuple unpacking below
        raises — confirm that is acceptable here.
        """
        sac_raw = []
        data_raw = []
        for chn in self.CHNS:
            sac_name = os.path.join(self.folder, self.sac_name_fmt.format(name=self.name, day=self.day, chn=chn))
            t, data_raw_i, sac_raw_i = self.__read_sac(sac_name)

            # sanity check: file sampling must match the configured dt
            if np.abs(t[1]-t[0]-self.dt)/self.dt>1e-6:
                
                raise ValueError(f'dt={t[1]-t[0]} in file VS {self.dt}, nt={len(t)} vs {self.nt} unmatched')
                
            data_raw.append(data_raw_i)
            sac_raw.append(sac_raw_i)
            # print(self.name, chn, data_raw_i[360000:720000].max())
        self.data_raw = data_raw
        self.t = t
        self.dt = t[1]-t[0]
        self.nt = len(t)

        if KEEP_SAC:
            self.sac_raw = sac_raw 
        self.AMP=data_raw[0].max()

        return True

    def __get_chn_data_slice(self, RAWDATA, start_time:float, end_time:float, npts = 0):
        """
        Slice one channel between start_time and end_time (seconds
        relative to the station's reference date). Out-of-range requests
        return zeros (with a loud warning); npts>0 enforces the expected
        slice length.
        """
        # UTC:+0, Landon Time
        t_s = self.t[0]
        sp = int( np.round( (start_time-t_s)/self.dt ) )
        ep = int( np.round( (end_time-t_s)  /self.dt ) )
        # 
        if sp<0 or ep>len(self.t):
            # requested window is outside the loaded record: fill with zeros
            data = np.zeros([ep-sp])
            print('\n$$$$$$$$$$$$$$$$$$$$$$')
            print(f'{self.name}: slice {start_time:.2f}:{end_time:.2f} not match from {t_s:.2f}-{self.t[-1]:.2f}, fill 0.')
            print('$$$$$$$$$$$$$$$$$$$$$$')
        else:
            data = RAWDATA[sp:ep]
        if npts != 0 and data.size!=npts:
            print(t_s, self.dt)
            raise ValueError('\n unmatched size',start_time, end_time, sp, ep, ep-sp, (start_time-t_s),(end_time-t_s))
        
        return data

    def get_event_data(self, start_time:float, end_time:float, npts = 0):
        """
        Slice all loaded channels for one event window.

        Returns a list with one array per channel (order of self.CHNS).
        """
        data_chns=[]

        for RAWDATA in self.data_raw:
            data=self.__get_chn_data_slice(RAWDATA, start_time, end_time,npts)
            data_chns.append(data)
            # print(data.max())
        return data_chns
    
    def search_events(self, PRINT=True, mean_win=1, REF_CHN='Z', 
                      OBS_WIN=10, STEP=0.5, STEP_EVENT=70, AMP_HSR=5e-5, MAX_AMP=0.001) -> List:
        """
        Coarse single-station screening for high-speed-rail events on the
        reference channel (same algorithm as the module-level
        `search_events`): scan in STEP-second hops, trigger on RMS above
        AMP_HSR, reject windows peaking above MAX_AMP, and pick the
        arrival where the smoothed Hilbert envelope first exceeds 20% of
        the window maximum.

        Returns the list of event arrival times (seconds).
        """
        # single-station coarse screening for HSR events
        events_time = []

        t = self.t
        nt = self.nt
        f = 1/self.dt/2
        dt = self.dt

        data_raw = self.data_raw[self.CHNS.index(REF_CHN)]

        # even-length moving-average filter for envelope smoothing
        FL = int(1/dt*mean_win)//2*2
        # print(FL, dt)
        mean_filter = np.ones([FL]) 
        
        tp=0
        while tp<nt:
            # sqrt(2)*RMS of the next STEP seconds (~ peak for a sinusoid)
            amp_tp = 2**0.5 * np.mean(data_raw[tp:tp+int(STEP/dt)]**2)**0.5
            if  amp_tp < AMP_HSR:
                tp = tp+int(STEP/dt)
                continue

            tp1 = max(0, int(tp-OBS_WIN/dt))
            tp2 = int(tp+OBS_WIN/dt)
            if tp2 >= nt:
                break

            trace = data_raw[tp1:tp2]
            trace_max = np.abs(trace).max()
            
            # too strong for an HSR event: jump past the window
            if  trace_max > MAX_AMP:
                tp = tp+int(OBS_WIN/dt)*2
                continue

            # smoothed Hilbert envelope, normalized by its maximum
            d_h = np.abs(signal.hilbert(trace))
            npts = len(d_h)
            d_h_mean = np.zeros_like(d_h)
            d_h_mean[(FL//2):(npts-FL//2+1)] = np.convolve(d_h, mean_filter,'valid')/len(mean_filter)
            d_amp = d_h_mean/(d_h_mean.max()+1e-10)

            # arrival = first sample above 20% of the envelope maximum
            arrive_loc = np.where(d_amp>0.2)[0].min()
            arrive_t = t[arrive_loc+tp1]
            
            events_time.append(arrive_t)

            if PRINT:
                print('\r Detect:{:3d} Events in Time {:5.1f}|{:5.0f}, AMP:{:.6f}m/s |{}'.format(
                                len(events_time), t[tp], t.max(), trace_max, self.name),
                        end='', flush=True)
            
            # skip the dead time after a detected event
            tp = arrive_loc+tp1+int(STEP_EVENT/dt)
            
        print('')
        # print(self.data_raw[0][360000:720000].max())
        return events_time

    def get_all_data(self):
        """
        Bundle this station's arrays into a dict.

        NOTE(review): `self.data` is never assigned anywhere in this
        class — calling this raises AttributeError; it presumably should
        be `self.data_raw`. Confirm before use.
        """
        file_data = {'data':self.data, 'data_raw':self.data_raw,'dt': self.dt}
        return file_data

class TrainEvents:
    """
    Train (HSR) event dataset built from per-station SAC files.

    Workflow: `activate_stations` loads the stations that have data for
    the day, `get_events` detects events on a base station and slices
    every station around each event, `save_events` writes the result to
    an HDF5 file.
    """
    def __init__(self,day='20190901', SACPATH='./sac/SAC100Hz', 
                EVENT_LENGTH=100,
                CHNS        =['Z'],
                fmt         ='{name}.*.{day}.*.{chn}.sac', 
                FS          =[0.2,20], 
                DW          =1,
                dt          =DT,
                **args
                            ) -> None:
        """
        Parameters
        ----------
        day : day string, forwarded to each Station.
        SACPATH : root directory containing one sub-folder per station.
        EVENT_LENGTH : event window length in seconds (centered on the pick).
        CHNS : channel codes to load.
        fmt : SAC filename glob pattern ({name}/{day}/{chn} fields).
        FS : band-pass corners in Hz, forwarded to Station.
        DW : downsampling ratio, forwarded to Station.
        dt : expected sampling interval in seconds.
        args : extra keyword arguments forwarded to Station.
        """
        self.day = day
        self.events_time = []
        self.NE = 0 # number of events
        self.NS = 0 # number of stations
        self.stations = {}
        self.stat_names = []
        self.events_data = None
        self.BASE_STAT = None

        self.SACPATH = SACPATH
        self.EVENT_LENGTH = EVENT_LENGTH
        self.CHNS = CHNS
        self.fmt = fmt
        self.FS = FS
        self.DW = DW
        self.dt = dt
        self.args = args
    
    def activate_stations(self, names=[],LOAD_DATA=True) -> None:
        """
        Instantiate a Station for each name, keep only those whose SAC
        file exists for this day, and (optionally) load their data.
        Populates self.stations / self.stat_names / self.NS.
        """
        # print(names)
        valid_names = []
        for name in names:
            folder = os.path.join(self.SACPATH, name)
            stat = Station(folder,name=name, day=self.day, CHNS=self.CHNS, 
                            fmt=self.fmt, 
                            LOAD_DATA=False, 
                            FS=self.FS, 
                            DW=self.DW, 
                            dt=self.dt, 
                            **self.args)
            stat_has_file = stat.file_exist()

            if stat_has_file:
                valid_names.append(name)
                self.stations[name]=stat
                if LOAD_DATA:
                    # NOTE(review): load_sac_data's only parameter is
                    # KEEP_SAC; passing self.CHNS here makes it truthy so
                    # raw traces are kept — confirm KEEP_SAC=True was meant.
                    stat.load_sac_data(self.CHNS)

        self.stat_names = valid_names
        print(self.stat_names)
        self.NS = len(self.stations.keys())

    def get_station(self, name:str) -> Station:
        """Return the activated Station object for `name` (KeyError if absent)."""
        return self.stations[name]

    def get_events(self, BASE_STAT = 'DZ075',AMP_HSR=5e-5, MAX_AMP=0.001) -> Tuple:
        """
        Detect events on the base station, then slice every activated
        station EVENT_LENGTH/2 seconds either side of each pick.

        Fills self.events_time / self.events_data (shape
        [n_events, n_stations, n_channels, npts]) / self.NE /
        self.BASE_STAT and returns 1 as a success flag.
        """
        base_stat = self.get_station(BASE_STAT)
        t = base_stat.t
        dt = base_stat.dt
        event_times = base_stat.search_events(AMP_HSR=AMP_HSR,MAX_AMP=MAX_AMP)
        
        EVENT_LENGTH = self.EVENT_LENGTH
        s0 = -EVENT_LENGTH/2   # window start offset relative to the pick
        e0 =  EVENT_LENGTH/2   # window end offset relative to the pick
        npts = int(np.round(EVENT_LENGTH/dt))
        ns = self.NS
        ne = len(event_times)
        records = np.zeros([ne,ns, len(base_stat.CHNS),npts])

        print(records.shape)

        for i_e, event_t in enumerate(event_times):
            print('\r generate dataset for event: {:.0f}|{}|{}|{}'.format(event_t, i_e, ne, BASE_STAT), end='', flush=True)
            for i_s, name in enumerate(self.stat_names):
                stat = self.get_station(name)
                # print(stat.data_raw[0][360000:720000].max())
                s = s0+event_t
                e = e0+event_t
                # print(s,e)
                records_i= stat.get_event_data(s,e,npts=npts)

                records[i_e, i_s,:,:] = np.array(records_i)
                # print(np.array(records_i).max(),records_i[0].max())
        print('')
        self.events_time = event_times
        self.events_data = records
        self.NE = ne
        self.BASE_STAT = BASE_STAT
        return 1

    def save_events(self, folder='./data',file_name=None, TYPE='H5'):
        """
        Write the detected events and metadata to an HDF5 file.

        file_name defaults to
        '{folder}/BS{base}_C{chns}_D{day}_S{ns}_D0_E{ne}_v1.h5'.
        TYPE is accepted for interface compatibility; only H5 is written.
        Returns 1 as a success flag.
        """
        chns_str=''.join(i for i in self.CHNS)
        if file_name is None:
            file_name = '{}/BS{}_C{}_D{}_S{:d}_D0_E{:d}_v1.h5'.format(folder, 
                    self.BASE_STAT, chns_str, self.day, self.NS, self.NE)
        print(file_name)

        from .h5data import save_h5_data

        save_h5_data(file_name,
        {
            'events_time':np.array(self.events_time),
            'stats'      :np.array(self.stat_names, dtype='S'),
            'events_data':self.events_data.astype('float32'),
            'BASE_STAT'  :self.BASE_STAT,
            'DAY'        :self.day,
            'HST_DIRECTION':0,
            'ns'         :self.NS,
            'ne'         :self.NE,
            'dt'         :DT,
            'CHN'        :chns_str
        }
        )

        return 1

class TrainEvents_H5:
    """
    Train (HSR) event dataset built from a pre-packed day-long HDF5 file
    (instead of per-station SAC files as in TrainEvents).

    Workflow: `activate_stations` loads all station traces from the H5
    file, `get_events` detects events on a base station and slices all
    stations around each pick, `save_events` writes the result to HDF5.
    """
    def __init__(self,day='20190901', H5file_fmt='./sac/P320_H5_100Hz/P320_{day}.h5', 
                EVENT_LENGTH=100,
                CHNS        =['Z'],
                FS          =[0.2,20], 
                **args) -> None:
        """
        Parameters
        ----------
        day : day string substituted into H5file_fmt.
        H5file_fmt : path template of the day-long HDF5 file.
        EVENT_LENGTH : event window length in seconds.
        CHNS : channel codes (data layout in the H5 file must match).
        FS : stored band-pass corners; not applied in this class.
        args : stored for interface compatibility; unused here.
        """
        self.day = day
        self.events_time = []
        self.NE = 0 # number of events
        self.NS = 0 # number of stations
        self.stations = None
        
        self.stat_names = []
        self.events_data = None
        self.BASE_STAT = None
        
        # attributes filled from the h5 file by activate_stations
        self.data = None          # waveform array [ns, nc, nt]
        self.data_health = None   # health map read from the file
        self.nt = 1
        self.dt = .1
        self.t = None

        self.H5file = H5file_fmt.format(day=day)
        self.EVENT_LENGTH = EVENT_LENGTH
        self.CHNS = CHNS
        self.FS = FS

        self.args = args
    
    def activate_stations(self, mv2vel=2e6) -> None:
        """
        Read station names, timing and waveforms from the H5 file.

        mv2vel : divisor applied to the raw counts
            (presumably converts recorded counts/mV to velocity — confirm
            against the file producer).
        """
        # print(names)
        names,dt, nt,health = read_h5_data(self.H5file, keys=['names','dt','nt','health'])
        names = [i.decode() for i in names]

        self.stat_names = names
        self.NS = len(names)
        self.nt = nt
        self.dt = dt
        self.t = np.arange(nt)*self.dt
        self.data_health = health

        # NOTE(review): this zero allocation is immediately replaced by the
        # np.array(...) assignment below — it only documents the layout.
        self.data = np.zeros([self.NS, len(self.CHNS),nt],dtype=f'float32')
        keys =[f'/data/{name}' for name in names]

        data = read_h5_data(self.H5file, keys)
        self.data=np.array(data,dtype=np.float32)/mv2vel

        print(self.NS,self.data.shape, self.stat_names)
    
    def __get_chn_data_slice(self, start_time:float, end_time:float, npts = 0):
        """
        Slice all stations/channels between start_time and end_time,
        zero-padding when the window extends past either end of the record.
        npts>0 enforces the expected slice length.

        NOTE(review): a window overrunning BOTH ends is handled only by
        the sp<0 branch — confirm that case cannot occur for the chosen
        EVENT_LENGTH.
        """
        # UTC:+0, Landon Time
        t_s = self.t[0]
        dt = self.dt
        nt = self.nt
        sp = int( np.round( (start_time-t_s)/dt ) )
        ep = int( np.round( (end_time-t_s)  /dt ) )
        # 
        ns,nc,nt = self.data.shape
        data = np.zeros([ns,nc,ep-sp],dtype=np.float32)
        if sp<0:
            # window starts before the record: left-pad with zeros
            data[:,:,-sp:] = self.data[:,:,0:ep]
        elif ep>nt:
            # window ends after the record: right-pad with zeros
            data[:,:,:nt-sp] = self.data[:,:,sp:]
        else:
            data = self.data[:,:,sp:ep]

        if npts != 0 and data.shape[-1]!=npts:
            print(t_s, self.dt)
            raise ValueError('\n unmatched size',start_time, end_time, sp, ep, ep-sp, (start_time-t_s),(end_time-t_s))
        
        return data
    
    def get_events(self, BASE_STAT = 'P200',REF_CHN='Z', AMP_HSR=5e-5, MAX_AMP=0.001, event_times=[]) -> Tuple:
        """
        Detect events on the base station's reference channel (unless
        `event_times` is supplied) and slice all stations EVENT_LENGTH/2
        seconds either side of each pick.

        Fills self.events_time / self.events_data (shape
        [n_events, n_channels, n_stations, npts] after the transpose) /
        self.NE / self.BASE_STAT and returns 1 as a success flag.
        """
        base_idx = self.stat_names.index(BASE_STAT)
        data_search = self.data[base_idx, self.CHNS.index(REF_CHN),:]

        if len(event_times)==0:
            event_times = search_events(data_search, self.t, AMP_HSR=AMP_HSR,MAX_AMP=MAX_AMP,marker=f'{BASE_STAT}|{self.day}')
        
        EVENT_LENGTH = self.EVENT_LENGTH
        s0 = -EVENT_LENGTH/2   # window start offset relative to the pick
        e0 =  EVENT_LENGTH/2   # window end offset relative to the pick
        npts = int(np.round(EVENT_LENGTH/self.dt))
        ns = self.NS
        ne = len(event_times)
        records = np.zeros([ne,ns, len(self.CHNS),npts], dtype='float32')

        print(records.shape)

        for i_e, event_t in enumerate(event_times):
            print('\r generate dataset for event: {:.0f}|{}|{}|{}'.format(event_t, i_e, ne, BASE_STAT), end='', flush=True)

            s = s0+event_t
            e = e0+event_t

            records[i_e,:,:,:] = self.__get_chn_data_slice(s, e, npts = npts)
        # reorder to (ne, nc, ns, nt)
        records = np.transpose(records, [0,2,1,3])
        print('')
        self.events_time = event_times
        self.events_data = records
        self.NE = ne
        self.BASE_STAT = BASE_STAT
        return 1

    def save_events(self, folder='./data',file_name=None, TYPE='H5'):
        """
        Write the detected events plus H5-specific extras (health map,
        source-file comment) to an HDF5 file.

        file_name defaults to
        '{folder}/BS{base}_C{chns}_D{day}_S{ns}_D0_E{ne}_v1.h5'.
        TYPE is accepted for interface compatibility; only H5 is written.
        Returns 1 as a success flag.
        """
        chns_str=''.join(i for i in self.CHNS)
        if file_name is None:
            file_name = '{}/BS{}_C{}_D{}_S{:d}_D0_E{:d}_v1.h5'.format(folder, 
                    self.BASE_STAT, chns_str, self.day, self.NS, self.NE)
        print(file_name)

        save_h5_data(file_name,
        {
            'events_time':np.array(self.events_time),
            'stats'      :np.array(self.stat_names, dtype='S'),
            'events_data':self.events_data.astype('float32'),
            'BASE_STAT'  :self.BASE_STAT,
            'DAY'        :self.day,
            'HST_DIRECTION':0,
            'ns'         :self.NS,
            'ne'         :self.NE,
            'dt'         :DT,
            'CHN'        :chns_str,
            # only for h5
            'health'     :self.data_health,
            'comments'   :self.H5file

        }
        )

        return 1

class RawDataBase:
    '''
    利用obspy.Stream建立一个数据库，方便进行事件截取等操作
    '''
    def __init__(self,dt=0.01):
        """Create an empty database with the given sampling interval (s)."""
        self.names = []               # station names, parallel to stream traces
        self.stream = ob.Stream()     # all loaded traces live here
        self.dt = dt                  # sampling interval in seconds
        self.ns = 0                   # number of traces currently loaded
        self.start_time = None        # UTCDateTime of data start, set on load

    def load_data_from_h5(self, h5file,names,names_keys, 
                        time_start='2000-01-01T00:00:00',
                        time_start_key=None, 
                        dt_key=None, 
                        FILL_NONE=False,
                        FLIM = [1,25],
                        CHN=None)->ob.Stream:
        '''
        Load traces from an HDF5 file into an obspy.Stream.

        h5file: path of the HDF5 file.
        names: list of station names.
        names_keys: list of HDF5 dataset keys, one per station name.
        time_start: fallback start time (ISO string) when time_start_key
            is absent or not found in the file.
        time_start_key: HDF5 key holding the start time; overrides time_start.
        dt_key: HDF5 key holding the sampling interval; falls back to self.dt.
        FILL_NONE: if True, fill missing keys with a 1-sample zero trace.
        FLIM: [fmin, fmax] of the bandpass filter applied to each trace.
        CHN: channel index, used when a dataset holds multiple channels (nc, nt).

        return: obspy.Stream (also stored on self.stream)
        '''
        import warnings  # local import: the module import block is elsewhere

        assert len(names)==len(names_keys)

        h5keys = get_h5_keys(h5file)
        traces_list = read_h5_data(h5file, keys=names_keys, 
                                    FILL_NONE= FILL_NONE, FILL_VALUE=np.zeros(1))
        assert len(traces_list)==len(names_keys)

        if dt_key is not None and dt_key in h5keys:
            dt, = read_h5_data(h5file, keys=[dt_key])
        else:
            dt = self.dt
            # BUG FIX: bare Warning(...) only built an exception object and
            # discarded it — nothing was ever reported. Actually emit it.
            warnings.warn(f'dt_key not found, use default dt={dt}')

        if time_start_key is not None and time_start_key in h5keys:
            time_start, = read_h5_data(h5file, keys=[time_start_key])
            time_start = time_start.decode()
        else:
            # BUG FIX: same dead Warning(...) call — emit a real warning.
            warnings.warn(f'time_start_key not found, use default time_start={time_start}')
        traces4stream = []
        
        for i,trace in tqdm(enumerate(traces_list), total=len(traces_list)):
            # select one channel from a multi-channel (nc, nt) dataset
            if CHN is not None and len(trace.shape)>1:
                trace = trace[CHN,:]

            trace = ob.Trace(data=trace, 
                             header={'delta':dt,'npts':len(trace),'starttime':UTCDateTime(time_start)})
            
            # skip detrend/filter for the 1-sample placeholders from FILL_NONE
            if len(trace.data)>1:
                trace.detrend()
                # trace.filter('highpass',freq=0.1)
                trace.filter('bandpass',freqmin=FLIM[0], freqmax=FLIM[1])
            
            trace.stats.station = names[i]
            traces4stream.append(trace)
        self.stream = ob.Stream(traces4stream)
        self.names = names
        self.ns = len(self.stream)
        self.start_time = UTCDateTime(time_start)

        return self.stream

    def load_data_from_sac(self, file_names, names,FILL_NONE=False, BAR=True)->ob.Stream:
        '''
        Load traces from SAC files into an obspy.Stream.

        file_names: list of SAC file paths, one per station.
        names: list of station names, parallel to file_names.
        FILL_NONE: if True, a missing file yields a 1-sample zero trace
            instead of failing inside obspy.read.
        BAR: show a tqdm progress bar while reading.

        return: obspy.Stream (also stored on self.stream)
        '''
        import warnings  # local import: the module import block is elsewhere

        assert len(names)==len(file_names)

        traces_list = []
        
        if not BAR:
            file_G = enumerate(file_names)
        else:
            file_G = tqdm(enumerate(file_names), total=len(file_names), desc='load data from sac files')

        for i,file_name in file_G:
            if FILL_NONE and len(glob.glob(file_name))==0:
                # BUG FIX: bare Warning(...) only built an exception object
                # and discarded it — actually emit the warning.
                warnings.warn(f'file {file_name} not found, fill zero, dt={self.dt}')
                trace = ob.Trace(data=np.zeros(1), 
                             header={'delta':self.dt,'npts':1,'starttime':UTCDateTime('2000-01-01T00:00:00')}) 
            else:
                trace = ob.read(file_name)[0]
                # BUG FIX: abs() so a delta *smaller* than self.dt also trips
                # the sanity check (the signed form passed for any delta < dt).
                assert abs(trace.stats.delta-self.dt)/self.dt<0.01
            # BUG FIX: set the station name on placeholder traces too; it was
            # previously assigned only in the else-branch.
            trace.stats.station = names[i]
            traces_list.append(trace)

        self.stream = ob.Stream(traces_list)
        self.names = names
        self.ns = len(self.stream)

        return self.stream

    def get_event_by_time_safe(self, ts:UTCDateTime,te:UTCDateTime) -> Tuple[ob.Stream,np.ndarray,np.ndarray]:
        '''
        Cut out an event window with Stream.copy().trim, padding gaps
        with zeros.  Safe but slower than get_event_by_time_fast.

        ts: start time
        te: end time

        return ob.Stream, data (ns, nt), t (seconds from window start)
        '''
        # trim a copy so the original stream stays untouched
        trimmed = self.stream.copy().trim(starttime=ts, endtime=te,pad=True,fill_value=0)

        NT = int((te-ts)/self.dt)
        data = np.array([tr.data[:NT] for tr in trimmed])
        t = np.arange(0,data.shape[-1])*self.dt

        return trimmed, data, t

    def get_event_by_time_fast(self, ts:UTCDateTime,te:UTCDateTime) -> Tuple[ob.Stream,np.ndarray,np.ndarray]:  
        '''
        Fast event extraction by direct sample indexing; traces that do
        not fully cover [ts, te] are left as all-zero rows.

        ts: start time
        te: end time

        return data (ns, nt), t (seconds, centred on the window middle)
        '''
        NT = int((te-ts)/self.dt)
        data = np.zeros((self.ns,NT), dtype=np.float32)

        for idx in range(self.ns):
            tr = self.stream[idx]
            start = int((ts-tr.stats.starttime)/self.dt)
            stop = start+NT
            # only copy traces that fully span the window; others stay zero
            if start >= 0 and stop <= tr.stats.npts:
                data[idx,:] = tr.data[start:stop]

        # remove the linear trend so each trace is centred around zero
        data = signal.detrend(data, axis=-1)

        t = np.arange(0,NT)*self.dt - int(NT/2)*self.dt
        return data,t
    def get_trace_by_name(self, name) -> ob.Trace:
        '''
        Return the trace whose station name equals ``name``.
        Raises ValueError (from list.index) when the name is unknown.
        '''
        return self.stream[self.names.index(name)]

    def downsample(self, factor=2):
        '''
        Decimate every trace in the stream by ``factor`` and scale the
        stored sampling interval accordingly.  (obspy's decimate applies
        an anti-alias lowpass by default — see obspy docs.)
        '''
        self.stream.decimate(factor)
        self.dt *= factor
        