"""
Extract DtCC through parallel cross-correlation computation for each event pair.
HuJing @ Chang'an University 
Email: hujing@chd.edu.cn 
Date: 2025-09-25
228454 waveform cross-correlations in 430 seconds (~7 minutes)
"""
from obspy import UTCDateTime
from obspy.signal.cross_correlation import correlate,xcorr_max,xcorr_pick_correction,cosine_taper
import obspy
import os
import numpy as np 
import logging
import tqdm
import glob
import time
import warnings
import  pickle
from  concurrent.futures import ProcessPoolExecutor,ThreadPoolExecutor,as_completed
warnings.filterwarnings("ignore")

def xcorr_pick_correction_new(pick1, trace1, pick2, trace2, t_before, t_after,
                              cc_maxlag, filter='bandpass', filter_options=None,
                              plot=False, filename=None):
    """Cross-correlate two picked waveforms and return a pick-time correction.

    Simplified variant of ObsPy's ``xcorr_pick_correction``: the lag is taken
    at the discrete sample maximum of the correlation function (no subsample
    parabola fit), and mismatching trace ids only warn instead of raising.

    :param pick1: pick time on ``trace1`` (UTCDateTime).
    :param trace1: first trace.
    :param pick2: pick time on ``trace2`` (UTCDateTime).
    :param trace2: second trace.
    :param t_before: window length before each pick, in seconds.
    :param t_after: window length after each pick, in seconds.
    :param cc_maxlag: maximum lag to search, in seconds.
    :param filter: filter type applied to *copies* of both traces; pass a
        falsy value to skip filtering.
    :param filter_options: kwargs forwarded to ``Trace.filter`` (default: {}).
    :param plot: if True, plot the (shifted) traces and the cc function.
    :param filename: with ``plot``, save the figure here instead of showing it.
    :returns: tuple ``(pick2_corr, cc_max)`` — the correction in seconds to
        add to ``pick2`` and the correlation coefficient at the chosen lag.
    :raises Exception: when sampling rates differ or a trace does not cover
        the required time window.
    """
    # Fix for the mutable-default-argument pitfall of the original signature
    # (``filter_options={}``): create a fresh dict per call.
    if filter_options is None:
        filter_options = {}
    if trace1.stats.sampling_rate != trace2.stats.sampling_rate:
        msg = "Sampling rates do not match"
        raise Exception(msg)
    if trace1.id != trace2.id:
        msg = "Trace ids do not match: %s != %s" % (trace1.id, trace2.id)
        warnings.warn(msg)
    samp_rate = trace1.stats.sampling_rate
    # don't modify existing traces with filters
    if filter:
        trace1 = trace1.copy()
        trace2 = trace2.copy()
    # check data, apply filter and take correct slice of traces
    slices = []
    for _i, (t, tr) in enumerate(((pick1, trace1), (pick2, trace2))):
        # half the max lag is padded on each side of the pick window
        start = t - t_before - (cc_maxlag / 2.0)
        end = t + t_after + (cc_maxlag / 2.0)
        duration = end - start
        # check if necessary time spans are present in data
        if tr.stats.starttime > start:
            msg = "Trace %s starts too late." % _i
            raise Exception(msg)
        if tr.stats.endtime < end:
            msg = "Trace %s ends too early." % _i
            raise Exception(msg)
        if filter and start - tr.stats.starttime < duration:
            msg = "Artifacts from signal processing possible. Trace " + \
                "%s should have more additional data at the start." % _i
            warnings.warn(msg)
        if filter and tr.stats.endtime - end < duration:
            msg = "Artifacts from signal processing possible. Trace " + \
                "%s should have more additional data at the end." % _i
            warnings.warn(msg)
        # apply signal processing and take correct slice of data
        if filter:
            tr.data = tr.data.astype(np.float64)
            tr.detrend(type='demean')
            # taper before filtering to suppress edge effects
            tr.data *= cosine_taper(len(tr), 0.1)
            tr.filter(type=filter, **filter_options)
        slices.append(tr.slice(start, end))
    # cross correlate
    shift_len = int(cc_maxlag * samp_rate)
    cc = correlate(slices[0].data, slices[1].data, shift_len, method='direct')
    _cc_shift, cc_max = xcorr_max(cc)
    # check results of cross correlation
    if cc_max < 0:
        msg = "Absolute maximum is negative: %.3f. " % cc_max + \
            "Using positive maximum: %.3f" % max(cc)
        warnings.warn(msg)
        cc_max = max(cc)
    if cc_max < 0.8:
        msg = "Maximum of cross correlation lower than 0.8: %s" % cc_max
        warnings.warn(msg)
    # make array with time shifts in seconds corresponding to cc function
    cc_t = np.linspace(-cc_maxlag, cc_maxlag, shift_len * 2 + 1)

    # convert the integer-sample shift to seconds; the sign is flipped so the
    # returned value is the correction to ADD to pick2
    dt = _cc_shift / samp_rate
    dt = -dt
    pick2_corr = dt
    # plot the results if selected
    if plot is True:
        import matplotlib.pyplot as plt
        fig = plt.figure()
        ax1 = fig.add_subplot(211)
        tmp_t = np.linspace(0, len(slices[0]) / samp_rate, len(slices[0]))
        ax1.plot(tmp_t, slices[0].data / float(slices[0].data.max()), "k",
                 label="Trace 1")
        ax1.plot(tmp_t, slices[1].data / float(slices[1].data.max()), "r",
                 label="Trace 2")
        ax1.plot(tmp_t - dt, slices[1].data / float(slices[1].data.max()),
                 "g", label="Trace 2 (shifted)")
        ax1.legend(loc="lower right", prop={'size': "small"})
        ax1.set_title("%s" % slices[0].id)
        ax1.set_xlabel("time [s]")
        ax1.set_ylabel("norm. amplitude")
        ax2 = fig.add_subplot(212)
        ax2.plot(cc_t, cc, ls="", marker=".", color="k",
                 label="xcorr")
        ax2.set_xlabel("shift time [s]")
        ax2.set_ylabel("correlation coefficient")
        ax2.set_ylim(-1, 1)
        ax2.set_xlim(cc_t[0], cc_t[-1])
        ax2.legend(loc="lower right", prop={'size': "x-small"})
        if filename:
            fig.savefig(filename)
        else:
            plt.show()

    return (pick2_corr, cc_max)


class ComputeCCDifferentialTime:
    """Compute cross-correlation differential travel times (dt.cc) for event pairs.

    Workflow: parse origin times from the hypoDD phase file, expand the dt.ct
    catalog-pair file into a flat task list (one row per event pair / station /
    phase), cross-correlate the two picks with ``ccfunc``, and stream every row
    whose correlation coefficient exceeds ``cc_threshold`` into ``ccfile_out``
    as CSV.
    """
    def __init__(self,par) -> None:
        """Store configuration from the flat ``par`` dict and prepare outputs.

        Side effects: runs ``rmdir`` on DataDir, lists the per-event waveform
        directories, configures file logging, and truncates ``ccfile_out``
        after writing a CSV header line.
        """
        self.logfile=par['logfile']
        self.message="Extract CC.DT"  # used as the logger name
        self.ccfunc=par['ccfunc']  # cross-correlation callable, e.g. xcorr_pick_correction_new
        self.beforePpick=par['beforePpick'] #0.5
        self.afterPpick=par['afterPpick']   #0.5
        self.cc_Pmaxlag=par['cc_Pmaxlag']   #0.3

        self.beforeSpick=par['beforeSpick'] # 0.5
        self.afterSpick=par['afterSpick']   
        self.cc_Smaxlag=par['cc_Smaxlag'] #0.5
        
        self.cc_threshold=par['cc_threshold'] #0.5
        self.ccplot=par['ccplot']

        self.filter_options=par['filter_options'] #{'freqmin': 0.5, 'freqmax': 5,'zerophase':True,'corners':4}
        self.samplingrate=par['samplingrate']    

        
        self.DataDir=par['DataDir']  # one subdirectory of SAC files per event id
        self.ctpairfile=par['ctpairfile']  # hypoDD dt.ct catalog differential-time file
        self.hypodata=par['hypodata']  # hypoDD phase file (origin times)
        self.evtime={}  # evid -> UTCDateTime origin time (filled by get_evtime)
        self.preprocAllStream={}  # evid -> Stream cache (filled by readAllSAC)
 
        # fall back to (cpu_count - 1) workers when par['max_workers'] is falsy
        self.max_workers= par['max_workers'] or max(1, os.cpu_count() - 1)
        self.ccfile_out=par['ccfile_out']   
        #
        self.pairfile=par['pairfile']       
        #
        # NOTE(review): 'rmdir' only removes DataDir itself when it is empty
        # (and then the listdir below would fail); it fails silently on a
        # non-empty directory and cannot prune empty *event* subdirectories.
        # Confirm the intent of this call.
        os.system(f"rmdir {self.DataDir}") # delete empty directory
        self.allevents=os.listdir(self.DataDir)
        self.setup_logging()

        # truncate the output file and write the CSV header
        with open(self.ccfile_out, 'w', encoding='utf-8') as f:
            f.write("evid1,evid2,o1,o2,tt1,tt2,dt_cc,coeff,dt_cat,net,station,pha\n")
    
    def _write_result(self, result):
        """Append one result row to the output CSV (re-opens the file per call)."""
        evid1,evid2,o1,o2,tt1,tt2,dtcc,coeff,dtcat,net,station,pha=result 
        line=f"{evid1},{evid2},{o1},{o2},{tt1},{tt2},{dtcc},{coeff},{dtcat},{net},{station},{pha}\n"
        with open(self.ccfile_out, 'a', encoding='utf-8') as f:
            f.write(line)

    def setup_logging(self):
        """Configure an INFO-level file logger (overwrites the log file).

        NOTE(review): calling this twice would attach a second handler and
        duplicate every log line.
        """
        self.logger = logging.getLogger(self.message)
        self.logger.setLevel(logging.INFO)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        file_handler = logging.FileHandler(self.logfile,mode='w')
        file_handler.setLevel(level=logging.INFO)
        file_handler.setFormatter(formatter)
        self.logger.addHandler(file_handler)

    def get_evtime(self):
        """Parse origin times from the hypoDD phase file into self.evtime.

        Header lines start with '#': "# year mon day hour min sec ... evid".
        Returns the evid -> UTCDateTime mapping.
        """
        with open(self.hypodata,'r') as f:
            tmp=f.read().splitlines()
            for line in tmp:
                ll=line.split()
                if ll[0]=="#":
                    evid=ll[-1]
                    year,mon,day,hour,minitue,seconds=ll[1:7]
                    # seconds may be fractional, so it is added as a float offset
                    origtime=UTCDateTime(int(year),int(mon),int(day),int(hour),int(minitue))+float(seconds)
                    self.evtime[evid]=origtime
        return self.evtime

    def get_pairs_fromDTCT(self):
        """Build self.pairinfo, one row per (event pair, station, phase).

        Loads the cached pickle ``self.pairfile`` when present; otherwise
        parses the hypoDD dt.ct file and caches the result.  Each row is
        ``[evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha]`` with
        absolute pick times ``t1 = o1 + tt1`` and ``t2 = o2 + tt2``.
        """
        # load from self.pairfile if it exists
        if os.path.exists(self.pairfile):  
            with open(self.pairfile, 'rb') as f:
                self.pairinfo = pickle.load(f) 
                return 
        # otherwise parse self.ctpairfile
        self.pairinfo=[]
        with open(self.ctpairfile,'r') as f:
            tmp=f.read().splitlines()
            nline=len(tmp)  # (unused)
            for line in tqdm.tqdm(tmp): # tmp is a list, so tqdm can wrap it directly
                ll=line.split()
                if ll[0]=="#":
                    # pair header line: "# evid1 evid2"
                    evid1,evid2=ll[1:]
                    o1=self.evtime[evid1]
                    o2=self.evtime[evid2]
                else:
                    # NOTE(review): assumes the first dt.ct line is a '#'
                    # header; otherwise evid1/evid2 are unbound here.
                    # skip pairs without waveform data for both events
                    if (evid1 not in self.allevents) or (evid2 not in self.allevents):
                        continue 
                    else:
                        # observation line: "NETSTA tt1 tt2 weight phase"
                        netsta,tt1,tt2,_,pha=ll
                        net=netsta[0:2]  # assumes 2-char network codes — TODO confirm
                        station=netsta[2:]
                        tt1=float(tt1)
                        tt2=float(tt2)
                        t1=o1+tt1
                        t2=o2+tt2
                        self.pairinfo.append([evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha])
        # cache pairinfo so subsequent runs skip the parse
        with open(self.pairfile, 'wb') as f:
            pickle.dump(self.pairinfo, f)

    
    
    def readAllSAC(self):
        """Read every event's SAC files into the self.preprocAllStream cache.

        Required before using cal_cc_one_pair_readstreamfrommemory / run_single.
        """
        self.preprocAllStream={}
        for evid in  tqdm.tqdm(self.allevents,total=len(self.allevents),desc="Reading SAC Files"):
            # read sac files 
            sacfiles=os.path.join(self.DataDir,evid,'*')
            try:
                stream=obspy.read(sacfiles)
                # preprocess
                # stream.detrend("demean").detrend("linear")
                # stream.detrend("linear")
                # stream.taper(max_percentage=0.05,type="hann")
                # stream.filter("bandpass",freqmin=self.fmin,freqmax=self.fmax,zerophase=True,corners=4)
                self.preprocAllStream[evid]=stream
                self.logger.info("# reading "+evid )
            except Exception as e:
                self.logger.error(str(e)) 
                print(str(e))
             
                
                

    def readSAC_withChunk(self,idlist,sacfilelist): 
        """Read one chunk of events' SAC files; returns {evid: Stream}.

        Worker for the (currently disabled) parallel loader below.
        """
        nev=len(idlist)
        streaminfo={}
        for i in tqdm.tqdm(range(nev),total=nev,desc="Reading SAC files"):
        # for i in range(nev):
            sacfiles=sacfilelist[i]
            evid=idlist[i]
            # read sac files 
            try:
                stream=obspy.read(sacfiles)
                streaminfo[evid]=stream
                # # preprocess
                # stream.detrend("demean").detrend("linear")
                # stream.detrend("linear")
                # stream.taper(max_percentage=0.05,type="hann")
                # stream.filter("bandpass",freqmin=self.fmin,freqmax=self.fmax,zerophase=True,corners=4)
                self.logger.info("# reading "+evid )
            except Exception as e:
                self.logger.error(str(e))
                print(str(e))
        return streaminfo  
     
    def readAndPreprocessAllSAC(self,max_workers=2):
        """Disabled parallel SAC loader (kept for reference).

        I/O contention made single-threaded reads faster than multi-threaded
        ones, and worker processes cannot share self.preprocAllStream.
        """
        pass
        # self.preprocAllStream={}
        # tasks_chunk=[]
        # num_event=len(self.allevents)
        # if num_event <= 1000:
        #     chunksize = max(50, num_event // max_workers)
        # else:
        #     chunksize = max(100, num_event // max_workers )
    
        # tasks_chunk = [ ]
        # for i in range(0, num_event, chunksize):
        #     idlist=self.allevents[i:i + chunksize]
        #     sacfilelist=[]
        #     for evid in idlist:
        #         sacfilelist.append(os.path.join(self.DataDir,evid,'*'))
        #     tasks_chunk.append([idlist,sacfilelist])
        # total_chunks = len(tasks_chunk)
            
        # # create the worker pool
        # futures=[]
        # with ThreadPoolExecutor(max_workers=max_workers) as executor:
        #     for i in range(total_chunks):
        #         idlist,sacfilelist=tasks_chunk[i]
        #         futures.append(executor.submit(self.readSAC_withChunk,idlist,sacfilelist))
        #     # realtime to process task 
        #     for future in tqdm.tqdm(as_completed(futures),total=total_chunks,desc="Loading SAC files"):
        #         try:
        #             streaminfo = future.result() 
        #             if len(streaminfo) >0:
        #                 try:
        #                     self.preprocAllStream.update(streaminfo) #[evid]=stream# merge in the main process
        #                 except Exception as e:
        #                     self.logger.error(str(e))
        #                     print(str(e))
        #         except Exception as e:
        #             self.logger.error(str(e))
        #             print(str(e))
                
                
    def cal_cc_one_pair_readstreamfromfile(self,evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha):
        """Cross-correlate one pair's picks, reading waveforms from disk.

        For P-type phases the vertical (Z) component is used; for S-type
        phases both horizontals (E, N) are correlated and the better
        coefficient wins.  Returns a result row when the coefficient exceeds
        self.cc_threshold, otherwise None.  dt_cc = (tt2 + correction) - tt1;
        dt_cat = tt2 - tt1.

        NOTE(review): if ``pha`` is neither P- nor S-type, ``coeff`` is unbound
        and the final comparison raises NameError (caught by the caller).
        """
        # '''
        # t1: o1+tt1
        # t2: o2+tt2
        # '''
        # stream1=self.preprocAllStream[evid1]
        # stream2=self.preprocAllStream[evid2]
        sacfiles1=glob.glob(os.path.join(self.DataDir,evid1,f'{net}.{station}.*'))
        sacfiles2=glob.glob(os.path.join(self.DataDir,evid2,f'{net}.{station}.*'))
        if len(sacfiles1)==0 or len(sacfiles2)==0:
            return None
        
        
        if pha in ["P","Pn","Pg"]:
            # vertical-component files only for P phases
            sacfiles1=os.path.join(self.DataDir,evid1,f'{net}.{station}.*Z')
            sacfiles2=os.path.join(self.DataDir,evid2,f'{net}.{station}.*Z')
            tr1=obspy.read(sacfiles1)[0]
            tr2=obspy.read(sacfiles2)[0]
            try:
 
                dt_correct, coeff = self.ccfunc(t1, 
                                                            tr1,
                                                            t2, 
                                                            tr2, 
                                                            self.beforePpick, 
                                                            self.afterPpick,
                                                            self.cc_Pmaxlag,filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None

                

        elif pha in ["S","Sn","Sg"]:
            sacfiles1=os.path.join(self.DataDir,evid1,f'{net}.{station}.*')
            sacfiles2=os.path.join(self.DataDir,evid2,f'{net}.{station}.*')
            stream1=obspy.read(sacfiles1)
            stream2=obspy.read(sacfiles2)

            # east component
            tr1E=stream1.select(network=net,station=station,component="E")[0]
            tr2E=stream2.select(network=net,station=station,component="E")[0]

            try:
 
                dt_correctE, coeffE = self.ccfunc(t1, 
                                                            tr1E, 
                                                            t2, 
                                                            tr2E, 
                                                            self.beforeSpick, 
                                                            self.afterSpick,
                                                            self.cc_Smaxlag,
                                                            filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None

            # north component
            tr1N=stream1.select(network=net,station=station,component="N")[0]
            tr2N=stream2.select(network=net,station=station,component="N")[0]
            try:
 
                dt_correctN, coeffN = self.ccfunc(t1, 
                                                            tr1N, 
                                                            t2, 
                                                            tr2N, 
                                                            self.beforeSpick, 
                                                            self.afterSpick,
                                                            self.cc_Smaxlag,
                                                            filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None

            # keep whichever horizontal correlated better
            if coeffE>coeffN:
                dt_correct=dt_correctE
                coeff=coeffE
            else:
                dt_correct=dt_correctN
                coeff= coeffN

        if  coeff>self.cc_threshold:
            dtcc = (tt2+dt_correct)-(tt1)
            dtcat=tt2-tt1
            # return dt_correct,coeff,res_dt
            return [evid1,evid2,o1,o2,tt1,tt2,dtcc,coeff,dtcat,net,station,pha]
                    
        else: 
            return None
        
    
    
    
    
    def cal_cc_one_pair_readstreamfrommemory(self,evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha):
        """Same as the file-reading variant, but uses the in-memory cache.

        Requires readAllSAC() to have populated self.preprocAllStream.
        ~30 s faster than reading from file for 8800 waveforms, but the
        author notes it was never fully tested end to end.

        NOTE(review): as above, an unrecognized ``pha`` leaves ``coeff``
        unbound (NameError caught by the caller).
        """
        # '''
        # t1: o1+tt1
        # t2: o2+tt2
        # '''
        stream1=self.preprocAllStream[evid1]
        stream2=self.preprocAllStream[evid2]
        if pha in ["P","Pn","Pg"]:
            try:
                tr1=stream1.select(network=net,station=station,component="Z")[0]
                tr2=stream2.select(network=net,station=station,component="Z")[0]
                dt_correct, coeff = self.ccfunc(t1, 
                                                            tr1,
                                                            t2, 
                                                            tr2, 
                                                            self.beforePpick, 
                                                            self.afterPpick,
                                                            self.cc_Pmaxlag,filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None

                

        elif pha in ["S","Sn","Sg"]:
            try:
                tr1E=stream1.select(network=net,station=station,component="E")[0]
                tr2E=stream2.select(network=net,station=station,component="E")[0]
                dt_correctE, coeffE = self.ccfunc(t1, 
                                                            tr1E, 
                                                            t2, 
                                                            tr2E, 
                                                            self.beforeSpick, 
                                                            self.afterSpick,
                                                            self.cc_Smaxlag,
                                                            filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None
            
            try:
                tr1N=stream1.select(network=net,station=station,component="N")[0]
                tr2N=stream2.select(network=net,station=station,component="N")[0]
                dt_correctN, coeffN = self.ccfunc(t1, 
                                                            tr1N, 
                                                            t2, 
                                                            tr2N, 
                                                            self.beforeSpick, 
                                                            self.afterSpick,
                                                            self.cc_Smaxlag,
                                                            filter="bandpass",
                                                            filter_options=self.filter_options,
                                                            plot=self.ccplot)
            except Exception as e:
                self.logger.error(str(e))
                return None

            # keep whichever horizontal correlated better
            if coeffE>coeffN:
                dt_correct=dt_correctE
                coeff=coeffE
            else:
                dt_correct=dt_correctN
                coeff= coeffN

        if  coeff>self.cc_threshold:
            dtcc = (tt2+dt_correct)-(tt1)
            dtcat=tt2-tt1
            # return dt_correct,coeff,res_dt
            return [evid1,evid2,o1,o2,tt1,tt2,dtcc,coeff,dtcat,net,station,pha]
                    
        else: 
            return None

    def cal_chunk_cc(self,onetask):
        """Process one chunk of pair tasks; returns the list of accepted rows.

        Runs inside a worker process; per-pair failures are logged and skipped
        so one bad pair cannot abort the whole chunk.
        """
        coll_result=[]
        # process every waveform pair in this task chunk
        for par in tqdm.tqdm(onetask,total=len(onetask),desc="runing one task"):
        # for par in onetask :
            try:
                evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha = par
                # choose one of the two computation variants
                result = self.cal_cc_one_pair_readstreamfromfile(evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha)
                # or the in-memory variant
                # result = self.cal_cc_one_pair_readstreamfrommemory(evid1, evid2, o1, o2, tt1, tt2, t1, t2, net, station, pha)
                
                if result is not None:
                    coll_result.append(result)
            except Exception as e:
                print(f"处理单个波形对时出错: {e}")
                continue  # continue with the next pair; do not abort the chunk
        
        print(f"任务块处理完成，有效结果: {len(coll_result)}/{len(onetask)}")
        return coll_result
            
    def run_parallel(self,pairs):
        """Fan pair tasks out over a process pool and write results as they land.

        Pairs are split into chunks of at least 50 (100 for large jobs) so each
        worker amortizes its startup cost; accepted rows are appended to
        ``ccfile_out`` from the main process as futures complete.
        """
        # tasks=self.pairinfo
        tasks_chunk=[]
        start_time = time.time()
        
        if len(pairs) <= 1000:
            chunksize = max(50, len(pairs) // self.max_workers)
        else:
            chunksize = max(100, len(pairs) // self.max_workers)
    
        tasks_chunk = [pairs[i:i + chunksize] for i in range(0, len(pairs), chunksize)]
        total_chunks = len(tasks_chunk)
        print(f"波形对总数: {len(pairs)}")
        print(f"任务块数: {total_chunks}, 每块大小: {chunksize}")
        print(f"使用进程数: {self.max_workers}")
    
        futures=[]
        with ProcessPoolExecutor(max_workers=self.max_workers) as executor:
            for task in tasks_chunk:
                futures.append(executor.submit(self.cal_chunk_cc,task))
            # NOTE(review): start_time is reset here, so the reported total
            # excludes chunking and submission time.
            start_time=time.time()
            # realtime to process task 
            for future in tqdm.tqdm(as_completed(futures),total=total_chunks,desc="processing tasks"):
                try:
                # if True:
                    results = future.result()
                    if len(results)>0 :
                        # write ccdt
                        for result in results: self._write_result(result)  
                except Exception as e:
                    print(f"处理波形对时出错: {e}")
        
        total_time = time.time() - start_time
        print("-" * 60)
        print(f"计算完成! 总耗时: {total_time:.2f}秒")
        print(f"平均每个波形对: {total_time/total_chunks*1000:.1f}毫秒")
        print(f"结果文件: {self.ccfile_out}")

    def run_single(self,pairs):
        """Serial fallback: process all pairs in this process, streaming output.

        NOTE(review): uses the in-memory variant, so readAllSAC() must have
        been called first — confirm.  The output file is not closed if an
        exception escapes the loop (no context manager).
        """
        
        total_pairs = len(pairs)
        start_time = time.time()
        fid=open(self.ccfile_out,'w')
        with tqdm.tqdm(total=total_pairs) as pbar:
            for i in range(total_pairs):
                pbar.update(1)
                evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha= pairs[i]
                result = self.cal_cc_one_pair_readstreamfrommemory(evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha)
                # result = self.cal_cc_one_pair_readstreamfromfile(evid1,evid2,o1,o2,tt1,tt2,t1,t2,net,station,pha)
                
                if result is None: continue      
                evid1,evid2,o1,o2,tt1,tt2,dtcc,coeff,dtcat,net,station,pha = result
                fid.write(f"{evid1},{evid2},{o1},{o2},{tt1},{tt2},{dtcc},{coeff},{dtcat},{net},{station},{pha}\n")
                fid.flush()
            fid.close()
            end_time = time.time()
            self.logger.info(f"计算完成，用时 {end_time - start_time:.2f} 秒")
            print(f"计算完成，用时 {end_time - start_time:.2f} 秒")
def main(start, end, par):
    """Run the full CC differential-time workflow for pairinfo[start:end]."""
    runner = ComputeCCDifferentialTime(par)
    runner.get_evtime()            # event id -> origin time
    runner.get_pairs_fromDTCT()    # build or load the pair task list
    subset = runner.pairinfo[start:end]
    # runner.run_single(subset)    # serial alternative
    runner.run_parallel(subset)
    
if __name__ == "__main__":
    # Slice of the pair list to process; an `end` beyond the list length is
    # harmless since Python slicing clamps silently.
    # (Removed an unused `import sys` that was previously here.)
    start = 0
    end = 889908
    par = {'logfile': './Dingri.log',
           'max_workers': 20,
           'ccfile_out': './cc.csv',
           # P-phase window (seconds before/after the pick) and max CC lag
           'beforePpick': 0.5,
           'afterPpick': 0.5,
           'cc_Pmaxlag': 0.3,

           # S-phase window and max CC lag
           'beforeSpick': 0.5,
           'afterSpick': 1.5,
           'cc_Smaxlag': 0.5,
           'cc_threshold': 0.5,  # minimum coefficient to keep a pair
           'filter_options': {'freqmin': 0.5,   # bandpass filter 
                              'freqmax': 5,
                              'zerophase': True,
                              'corners': 4},
           'ccfunc': xcorr_pick_correction_new,
           'ccplot': False,
           'samplingrate': 100,
           'DataDir': './DingriWaveform-Selected/',
           'ctpairfile': './dt.ct',
           'hypodata': './hypoDD.pha',
           # if pairfile does not exist, it will be produced from ctpairfile
           'pairfile': 'pairs.pkl',
           }

    main(start, end, par)
