"""
Extract DtCC through parallel cross-correlation computation for each event pair.
HuJing @ Chang'an University 
Email: hujing@chd.edu.cn 
Date: 2025-10-227
Benchmark: 228454 waveform cross-correlations took about 430 seconds (~7 minutes).
"""
from obspy import UTCDateTime
from obspy.signal.cross_correlation import correlate,xcorr_max,xcorr_pick_correction,cosine_taper
import obspy
import os
import numpy as np 
import logging
import tqdm
import glob,gc
import time
import warnings
import  pickle
from  concurrent.futures import ProcessPoolExecutor,ThreadPoolExecutor,as_completed
warnings.filterwarnings("ignore")

def xcorr_pick_correction_new(pick1, trace1, pick2, trace2, t_before, t_after,
                              cc_maxlag, filter='bandpass', filter_options=None,
                              plot=False, filename=None):
    """Cross-correlate two picked phases and return the pick-time correction.

    Simplified variant of obspy's ``xcorr_pick_correction``: the correction is
    taken directly from the integer-sample CC maximum (no subsample parabola
    fit).

    Parameters
    ----------
    pick1, pick2 : UTCDateTime
        Pick times on ``trace1`` / ``trace2``.
    trace1, trace2 : obspy.Trace
        Waveforms; must share the same sampling rate.
    t_before, t_after : float
        Window (seconds) around each pick used for correlation.
    cc_maxlag : float
        Maximum lag (seconds) searched in the cross-correlation.
    filter : str or falsy
        Filter type applied to *copies* of the traces, or falsy to skip.
        NOTE: shadows the builtin ``filter``; name kept for backward
        compatibility with existing callers.
    filter_options : dict, optional
        Keyword arguments for ``Trace.filter`` (default: empty dict; was a
        mutable default argument, now created per call).
    plot : bool
        If True, produce a diagnostic plot.
    filename : str, optional
        Save the plot here instead of showing it interactively.

    Returns
    -------
    tuple
        ``(pick2_corr, cc_max)``: correction in seconds to add to ``pick2``,
        and the cross-correlation peak value.

    Raises
    ------
    Exception
        If sampling rates differ or a trace does not cover the needed window.
    """
    if filter_options is None:
        filter_options = {}
    if trace1.stats.sampling_rate != trace2.stats.sampling_rate:
        msg = "Sampling rates do not match"
        raise Exception(msg)
    if trace1.id != trace2.id:
        msg = "Trace ids do not match: %s != %s" % (trace1.id, trace2.id)
        warnings.warn(msg)
    samp_rate = trace1.stats.sampling_rate
    # Work on copies so filtering does not modify the caller's traces.
    if filter:
        trace1 = trace1.copy()
        trace2 = trace2.copy()
    # Check data, apply filter and take the correct slice of each trace.
    slices = []
    for _i, (t, tr) in enumerate(((pick1, trace1), (pick2, trace2))):
        # Window padded by half the max lag on each side.
        start = t - t_before - (cc_maxlag / 2.0)
        end = t + t_after + (cc_maxlag / 2.0)
        duration = end - start
        # The required time span must be fully covered by the trace.
        if tr.stats.starttime > start:
            msg = "Trace %s starts too late." % _i
            raise Exception(msg)
        if tr.stats.endtime < end:
            msg = "Trace %s ends too early." % _i
            raise Exception(msg)
        if filter and start - tr.stats.starttime < duration:
            msg = "Artifacts from signal processing possible. Trace " + \
                "%s should have more additional data at the start." % _i
            warnings.warn(msg)
        if filter and tr.stats.endtime - end < duration:
            msg = "Artifacts from signal processing possible. Trace " + \
                "%s should have more additional data at the end." % _i
            warnings.warn(msg)
        # Demean + taper before filtering to limit edge artifacts.
        if filter:
            tr.data = tr.data.astype(np.float64)
            tr.detrend(type='demean')
            tr.data *= cosine_taper(len(tr), 0.1)
            tr.filter(type=filter, **filter_options)
        slices.append(tr.slice(start, end))
    # Cross-correlate within +/- cc_maxlag.
    shift_len = int(cc_maxlag * samp_rate)
    cc = correlate(slices[0].data, slices[1].data, shift_len, method='direct')
    _cc_shift, cc_max = xcorr_max(cc)
    # xcorr_max may report a negative absolute maximum; fall back to the
    # positive maximum in that case.
    if cc_max < 0:
        msg = "Absolute maximum is negative: %.3f. " % cc_max + \
            "Using positive maximum: %.3f" % max(cc)
        warnings.warn(msg)
        cc_max = max(cc)
    if cc_max < 0.8:
        msg = "Maximum of cross correlation lower than 0.8: %s" % cc_max
        warnings.warn(msg)
    # Time axis of the CC function (only needed for plotting).
    cc_t = np.linspace(-cc_maxlag, cc_maxlag, shift_len * 2 + 1)

    # Convert the integer shift to seconds; the sign flip turns it into a
    # correction to ADD to pick2.
    dt = -(_cc_shift / samp_rate)
    pick2_corr = dt

    # Diagnostic plot if requested.
    if plot is True:
        import matplotlib.pyplot as plt
        fig = plt.figure()
        ax1 = fig.add_subplot(211)
        tmp_t = np.linspace(0, len(slices[0]) / samp_rate, len(slices[0]))
        ax1.plot(tmp_t, slices[0].data / float(slices[0].data.max()), "k",
                 label="Trace 1")
        ax1.plot(tmp_t, slices[1].data / float(slices[1].data.max()), "r",
                 label="Trace 2")
        ax1.plot(tmp_t - dt, slices[1].data / float(slices[1].data.max()),
                 "g", label="Trace 2 (shifted)")
        ax1.legend(loc="lower right", prop={'size': "small"})
        ax1.set_title("%s" % slices[0].id)
        ax1.set_xlabel("time [s]")
        ax1.set_ylabel("norm. amplitude")
        ax2 = fig.add_subplot(212)
        ax2.plot(cc_t, cc, ls="", marker=".", color="k",
                 label="xcorr")
        ax2.set_xlabel("shift time [s]")
        ax2.set_ylabel("correlation coefficient")
        ax2.set_ylim(-1, 1)
        ax2.set_xlim(cc_t[0], cc_t[-1])
        ax2.legend(loc="lower right", prop={'size': "x-small"})
        if filename:
            fig.savefig(filename)
        else:
            plt.show()

    return (pick2_corr, cc_max)

class ComputeCCDifferentialTime:
    """Prepare inputs for event-pair cross-correlation differential-time extraction.

    Reads configuration from ``par``, loads event origin times from a hypoDD
    phase file, and expands a dt.ct catalog pair file into per-observation
    records consumed by the CC worker functions.
    """

    def __init__(self, par) -> None:
        self.logfile = par['logfile']
        self.message = "Extract CC.DT"
        self.ccfunc = par['ccfunc']

        # P-phase correlation window (seconds around the pick) and max CC lag.
        self.beforePpick = par['beforePpick']  # e.g. 0.5
        self.afterPpick = par['afterPpick']    # e.g. 0.5
        self.cc_Pmaxlag = par['cc_Pmaxlag']    # e.g. 0.3

        # S-phase correlation window and max CC lag.
        self.beforeSpick = par['beforeSpick']  # e.g. 0.5
        self.afterSpick = par['afterSpick']
        self.cc_Smaxlag = par['cc_Smaxlag']    # e.g. 0.5

        self.cc_threshold = par['cc_threshold']  # minimum coefficient to keep a result
        self.ccplot = par['ccplot']

        # Bandpass options, e.g. {'freqmin': 0.5, 'freqmax': 5, 'zerophase': True, 'corners': 4}
        self.filter_options = par['filter_options']
        self.samplingrate = par['samplingrate']

        self.DataDir = par['DataDir']        # one sub-directory of SAC files per event
        self.ctpairfile = par['ctpairfile']  # hypoDD dt.ct catalog pair file
        self.hypodata = par['hypodata']      # hypoDD phase file with origin times
        self.evtime = {}
        self.preprocAllStream = {}

        self.max_workers = par['max_workers'] or max(1, os.cpu_count() - 1)
        self.ccfile_out = par['ccfile_out']
        self.pairfile = par['pairfile']

        # Remove DataDir only when it is an empty directory (os.rmdir fails
        # otherwise). Replaces the old os.system("rmdir ...") shell call,
        # which silently ignored errors and was shell-injection-prone if the
        # configured path contained shell metacharacters.
        try:
            os.rmdir(self.DataDir)
        except OSError:
            pass  # non-empty or missing: leave it alone
        self.allevents = os.listdir(self.DataDir)
        self.setup_logging()

        # Start a fresh output file with a CSV header.
        with open(self.ccfile_out, 'w', encoding='utf-8') as f:
            f.write("evid1,evid2,o1,o2,tt1,tt2,dt_cc,coeff,dt_cat,net,station,pha\n")

    def setup_logging(self):
        """Configure a file-based logger (the log file is truncated on start)."""
        self.logger = logging.getLogger(self.message)
        self.logger.setLevel(logging.INFO)
        # Guard against attaching duplicate handlers when this class is
        # instantiated more than once in the same process.
        if not self.logger.handlers:
            formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            file_handler = logging.FileHandler(self.logfile, mode='w')
            file_handler.setLevel(level=logging.INFO)
            file_handler.setFormatter(formatter)
            self.logger.addHandler(file_handler)

    def get_evtime(self):
        """Parse event origin times from the hypoDD phase file.

        Header lines start with '#': ``# year mon day hour min sec ... evid``.

        Returns
        -------
        dict
            Mapping ``{evid: UTCDateTime}`` (also stored on ``self.evtime``).
        """
        with open(self.hypodata, 'r') as f:
            for line in f.read().splitlines():
                ll = line.split()
                if ll[0] == "#":
                    evid = ll[-1]
                    year, mon, day, hour, minute, seconds = ll[1:7]
                    # Seconds may be fractional, so add them as a float offset.
                    origtime = UTCDateTime(int(year), int(mon), int(day),
                                           int(hour), int(minute)) + float(seconds)
                    self.evtime[evid] = origtime
        return self.evtime

    def get_pairs_fromDTCT(self):
        """Build ``self.pairinfo`` from the dt.ct file (cached via pickle).

        Each record is ``[evid1, evid2, o1, o2, tt1, tt2, t1, t2, net,
        station, pha]`` where ``t1``/``t2`` are absolute pick times
        (origin time + travel time). Requires ``get_evtime()`` to have run.
        """
        # Reuse the cached pair list when available.
        # NOTE(review): pickle.load can execute arbitrary code — only load
        # pair files produced by this script itself.
        if os.path.exists(self.pairfile):
            with open(self.pairfile, 'rb') as f:
                self.pairinfo = pickle.load(f)
                return

        self.pairinfo = []
        with open(self.ctpairfile, 'r') as f:
            lines = f.read().splitlines()

        for line in tqdm.tqdm(lines, desc="Reading pairs"):
            ll = line.split()
            if ll[0] == "#":
                # Pair header line: "# evid1 evid2".
                evid1, evid2 = ll[1:]
                o1 = self.evtime[evid1]
                o2 = self.evtime[evid2]
            else:
                # Skip observations whose event waveforms are not on disk.
                if (evid1 not in self.allevents) or (evid2 not in self.allevents):
                    continue
                netsta, tt1, tt2, _, pha = ll
                net = netsta[0:2]     # first two characters: network code
                station = netsta[2:]  # remainder: station code
                tt1 = float(tt1)
                tt2 = float(tt2)
                t1 = o1 + tt1
                t2 = o2 + tt2
                self.pairinfo.append([evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha])

        # Cache the parsed pair list for the next run.
        with open(self.pairfile, 'wb') as f:
            pickle.dump(self.pairinfo, f)

   


def _select_component(stream, net, station, primary, fallback):
    """Return the first trace matching *primary* component, else *fallback*.

    Raises ValueError when neither component is present (the original code
    did ``raise("...")``, which raises TypeError because a str is not an
    exception).
    """
    for comp in (primary, fallback):
        traces = stream.select(network=net, station=station, component=comp)
        if traces:
            return traces[0]
    raise ValueError("No horizontal components")


def cal_cc_one_pair_readstreamfromfile(args):
    """Compute the CC differential time for one event pair at one station/phase.

    Parameters
    ----------
    args : tuple
        ``((evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha), config)``
        where ``t1 = o1 + tt1`` and ``t2 = o2 + tt2`` are absolute pick times
        and ``config`` is a plain dict of settings (cheap to ship to workers).

    Returns
    -------
    list or None
        ``[evid1, evid2, o1, o2, tt1, tt2, dtcc, coeff, dtcat, net, station, pha]``
        when the correlation coefficient exceeds ``config['cc_threshold']``;
        otherwise None (missing files, bad data, low coefficient, unknown phase).
    """
    (evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha), config = args

    # Copy the needed settings instead of passing a whole object around.
    DataDir = config['DataDir']
    beforePpick = config['beforePpick']
    afterPpick = config['afterPpick']
    cc_Pmaxlag = config['cc_Pmaxlag']
    beforeSpick = config['beforeSpick']
    afterSpick = config['afterSpick']
    cc_Smaxlag = config['cc_Smaxlag']
    cc_threshold = config['cc_threshold']
    filter_options = config['filter_options']
    ccplot = config['ccplot']

    # File naming convention: <net>.<net><station>.* inside each event folder.
    netsta = net + station
    sacfiles1 = glob.glob(os.path.join(DataDir, evid1, f'{net}.{netsta}.*'))
    sacfiles2 = glob.glob(os.path.join(DataDir, evid2, f'{net}.{netsta}.*'))
    if len(sacfiles1) == 0 or len(sacfiles2) == 0:
        return None

    if pha in ("P", "Pn", "Pg"):
        # P phase: correlate the vertical (Z) component only.
        patt1 = os.path.join(DataDir, evid1, f'{net}.{netsta}.*Z')
        patt2 = os.path.join(DataDir, evid2, f'{net}.{netsta}.*Z')
        try:
            tr1 = obspy.read(patt1)[0]
            tr2 = obspy.read(patt2)[0]
        except Exception:
            print("file format error!")
            return None
        try:
            dt_correct, coeff = xcorr_pick_correction_new(
                t1, tr1, t2, tr2,
                beforePpick, afterPpick, cc_Pmaxlag,
                filter="bandpass",
                filter_options=filter_options,
                plot=ccplot)
        except Exception as e:
            print(str(e))
            return None

    elif pha in ("S", "Sn", "Sg"):
        # S phase: correlate both horizontals and keep the better result.
        patt1 = os.path.join(DataDir, evid1, f'{net}.{netsta}.*')
        patt2 = os.path.join(DataDir, evid2, f'{net}.{netsta}.*')
        try:
            stream1 = obspy.read(patt1)
            stream2 = obspy.read(patt2)
        except Exception:
            print("File format error!")
            return None
        # Require full three-component data on both events.
        if len(stream1) != 3 or len(stream2) != 3:
            return None

        # E/N naming preferred; fall back to 1/2 channel naming. A missing
        # horizontal now skips the pair instead of raising an invalid
        # exception that could abort a whole sequential run.
        try:
            tr1E = _select_component(stream1, net, station, "E", "1")
            tr2E = _select_component(stream2, net, station, "E", "1")
            tr1N = _select_component(stream1, net, station, "N", "2")
            tr2N = _select_component(stream2, net, station, "N", "2")
        except ValueError as e:
            print(str(e))
            return None

        try:
            dt_correctE, coeffE = xcorr_pick_correction_new(
                t1, tr1E, t2, tr2E,
                beforeSpick, afterSpick, cc_Smaxlag,
                filter="bandpass",
                filter_options=filter_options,
                plot=ccplot)
            dt_correctN, coeffN = xcorr_pick_correction_new(
                t1, tr1N, t2, tr2N,
                beforeSpick, afterSpick, cc_Smaxlag,
                filter="bandpass",
                filter_options=filter_options,
                plot=ccplot)
        except Exception as e:
            print(str(e))
            return None

        # Keep the horizontal component with the higher coefficient.
        if coeffE > coeffN:
            dt_correct, coeff = dt_correctE, coeffE
        else:
            dt_correct, coeff = dt_correctN, coeffN
    else:
        # Unknown phase label: the original fell through with `coeff`
        # undefined and crashed with NameError; skip such observations.
        return None

    if coeff > cc_threshold:
        # CC-refined differential time vs. the catalog differential time.
        dtcc = (tt2 + dt_correct) - tt1
        dtcat = tt2 - tt1
        return [evid1, evid2, o1, o2, tt1, tt2, dtcc, coeff, dtcat, net, station, pha]
    return None
def cal_chunk_cc(args):
    """Process one chunk of waveform pairs in a worker process.

    Parameters
    ----------
    args : tuple
        ``(onetask, config)`` — a list of pair records and the settings dict.

    Returns
    -------
    list
        Valid CC results (see cal_cc_one_pair_readstreamfromfile); failed
        pairs are skipped so one bad pair never aborts the whole chunk.
    """
    onetask, config = args
    coll_result = []
    for par in tqdm.tqdm(onetask, total=len(onetask), desc="runing one task"):
        try:
            # NOTE: the original called the pair routine twice per pair and
            # discarded the first result, doubling the runtime.
            result = cal_cc_one_pair_readstreamfromfile(args=(par, config))
            if result is not None:
                coll_result.append(result)
        except Exception as e:
            print(f"处理单个波形对时出错: {e}")
            continue  # keep going; do not abort the whole chunk

    print(f"任务块处理完成，有效结果: {len(coll_result)}/{len(onetask)}")
    return coll_result
    
    
    
def calculate_optimal_chunksize(total_pairs, workers):
    """Return a task-chunk size balancing scheduling overhead and memory use.

    Small workloads get a floor of 50 pairs per chunk, larger ones 100, so
    workers are not starved by tiny chunks while chunks stay big enough to
    amortize process-pool overhead.

    Parameters
    ----------
    total_pairs : int
        Total number of waveform pairs to distribute.
    workers : int
        Number of worker processes; values < 1 are treated as 1
        (the original divided by ``workers`` unchecked and could raise
        ZeroDivisionError).

    Returns
    -------
    int
        Suggested chunk size.
    """
    workers = max(1, workers)
    floor = 50 if total_pairs <= 1000 else 100
    return max(floor, total_pairs // workers)

def write_cc_result(cc_filename, result):
    """Append a single CC measurement to *cc_filename* as one CSV row."""
    (evid1, evid2, o1, o2, tt1, tt2,
     dtcc, coeff, dtcat, net, station, pha) = result
    fields = (evid1, evid2, o1, o2, tt1, tt2,
              dtcc, coeff, dtcat, net, station, pha)
    with open(cc_filename, 'a', encoding='utf-8') as out:
        out.write(",".join(str(value) for value in fields) + "\n")

        
def process_completed_futures(cc_filename, futures):
    """Drain finished futures: write their results, then drop them from *futures*.

    The *futures* list is mutated in place (finished entries removed) and
    also returned for convenience.
    """
    finished = [fut for fut in list(futures) if fut.done()]
    for fut in finished:
        try:
            results = fut.result()
            if results:
                for result in results:
                    write_cc_result(cc_filename, result)
        except Exception as e:
            print(f"\n处理任务时出错: {e}")

    # Remove the futures we just consumed.
    for fut in finished:
        futures.remove(fut)
    return futures

def free_memory():
    """Trigger an explicit garbage-collection pass to release unreferenced memory."""
    gc.collect()
    
def run_single(config, pairs):
    """Sequentially compute CC dt for every pair, appending to the output file.

    Slower than run_parallel but useful for debugging. Results are flushed
    per pair, so a crash loses at most the current measurement. The output
    file is now opened with a context manager so it is closed even if a
    pair raises (the original leaked the handle on error), and the loop
    iterates pairs directly instead of indexing.
    """
    start_time = time.time()
    cc_filename = config['ccfile_out']
    with open(cc_filename, 'a', encoding='utf-8') as fid:
        for pair in tqdm.tqdm(pairs, total=len(pairs)):
            result = cal_cc_one_pair_readstreamfromfile(args=(pair, config))
            if result is None:
                continue
            fid.write(",".join(str(value) for value in result) + "\n")
            fid.flush()  # keep the file current in case of a crash
    end_time = time.time()
    print(f"计算完成，用时 {end_time - start_time:.2f} 秒")
def run_parallel(config, pairs):
    """Compute CC dt for all pairs using a process pool, writing results to CSV.

    Pairs are grouped into chunks (see calculate_optimal_chunksize) and each
    chunk is handled by one worker process; results are written as chunks
    complete.
    """
    max_workers = config['max_workers']
    cc_filename = config['ccfile_out']
    chunksize = calculate_optimal_chunksize(len(pairs), max_workers)
    tasks_chunk = [pairs[i:i + chunksize] for i in range(0, len(pairs), chunksize)]
    total_chunks = len(tasks_chunk)
    print(f"波形对总数: {len(pairs)}")
    print(f"任务块数: {total_chunks}, 每块大小: {chunksize}")
    print(f"使用进程数: {max_workers}")
    start_time = time.time()
    # ProcessPoolExecutor gives true CPU parallelism (threads were tried and
    # found inefficient for this workload); memory use is higher per worker.
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(cal_chunk_cc, (task, config))
                   for task in tasks_chunk]
        for future in tqdm.tqdm(as_completed(futures), total=total_chunks,
                                desc="processing tasks"):
            results = future.result()
            if len(results) > 0:
                for result in results:
                    write_cc_result(cc_filename, result)
                free_memory()

    total_time = time.time() - start_time
    print("-" * 60)
    print(f"计算完成! 总耗时: {total_time:.2f}秒")
    # Average over individual pairs (was mistakenly divided by the chunk
    # count); max() guards against an empty pair list.
    print(f"平均每个波形对: {total_time/max(len(pairs), 1)*1000:.1f}毫秒")
    print(f"结果文件: {cc_filename}")
    
def main(start, end, par):
    """Entry point: build pair metadata, then run parallel CC on pairs[start:end]."""
    driver = ComputeCCDifferentialTime(par)
    # Load event origin times, then expand the dt.ct file into pair records.
    driver.get_evtime()
    driver.get_pairs_fromDTCT()
    selected = driver.pairinfo[start:end]
    # run_single(par, selected)  # sequential fallback for debugging
    run_parallel(par, selected)
    
if __name__ == "__main__":
    import sys

    # The single CLI argument selects which dt.ct segment to process.
    if len(sys.argv) < 2:
        sys.exit("usage: python <script>.py <segment>")
    seg = sys.argv[1]

    start = 0
    end = 5915249  # upper bound on the number of pair records to process
    par = {
        'logfile': './Dingri.log',
        'max_workers': 20,
        'ccfile_out': './cc.csv',
        # P-phase window (s) around the pick and max CC lag (s).
        'beforePpick': 0.5,
        'afterPpick': 0.5,
        'cc_Pmaxlag': 0.3,
        # S-phase window (s) around the pick and max CC lag (s).
        'beforeSpick': 0.5,
        'afterSpick': 1.5,
        'cc_Smaxlag': 0.5,
        # Minimum CC coefficient required to keep a measurement.
        'cc_threshold': 0.5,
        'filter_options': {'freqmin': 0.5,   # bandpass filter
                           'freqmax': 5,
                           'zerophase': True,
                           'corners': 4},
        'ccfunc': xcorr_pick_correction_new,
        'ccplot': False,
        'samplingrate': 100,
        'DataDir': '/data/home/hj/BorregoSprings-waveforms/waveforms',
        'ctpairfile': f"/data/home/hj/01-2025-tomoTDDMC-Results/Input/dt.ct.time_{seg}",
        'hypodata': f"./hypoDD_{seg}.pha",
        # If pairfile does not exist, it is produced from ctpairfile.
        'pairfile': 'pairs.pkl',
    }

    main(start, end, par)
