import subprocess
import sys
import os
from pathlib import Path
# Add the project root directory to sys.path so upper-level modules can be imported
current_dir = Path(__file__).resolve().parent
parent_dir = current_dir.parent
sys.path.append(str(parent_dir))
import argparse
import ntpath
from operator import itemgetter
import numpy as np
import sys, glob, os
import time
from astropy.table import Table, hstack, join, vstack
from astropy.time import TimeDelta
from astropy.io import fits
from astropy import units as u
from astropy.coordinates import match_coordinates_sky, SkyCoord
from astropy.time import Time
from tqdm import tqdm
from loguru import logger
import healpy as hp
import astropy
import configpath
from utils.tools import SimultaneousObservationFinder
class CatalogMerger:
    """Merge simultaneous multi-band Mephisto SExtractor catalogs into one table
    and cross-match the result with Gaia DR3 and Gaia XP synthetic photometry.

    Workflow (see process()): find groups of files observed simultaneously,
    merge each group band-by-band (meph_merge), then attach Gaia DR3 and XP
    columns (match_gaiadr3_xp_1m6) and write FITS products plus DS9 region files.
    """
    def __init__(self, base_dir, date, version, catdir=None,filesavedir=None, log_dir="log/merge", sep=1.0, match_r=1.0,path_cat=None,path_xpcat=None):
        """Set up input/output paths, logging and reference-catalog locations.

        Args:
            base_dir: root directory of the pipeline data products.
            date: observation date string "YYYY-MM-DD".
            version: pipeline version tag used in the directory layout.
            catdir: optional glob pattern for the input *sexcat_CalMag.fits files.
            filesavedir: optional output directory (a date subfolder is appended).
            log_dir: directory for loguru log files.
            sep: radius (arcsec) used to group detections into unique sources.
            match_r: Gaia cross-match radius in arcsec.
            path_cat: optional override of the Gaia DR3 reference-catalog path.
            path_xpcat: optional override of the Gaia XP catalog path.
        """
        self.base_dir = base_dir
        self.date = date
        self.version = version
        self.sep = sep
        self.match_r = match_r
        self.log_dir = log_dir
        if filesavedir:
            self.filesavedir=os.path.join(filesavedir,date.replace("-",""))
        else:
            self.filesavedir =os.path.join( f'{self.base_dir}/{self.version}/{self.date}/sci_products/merge_catalog/',date.replace("-",""))

        
        # Use catdir if it was provided, otherwise fall back to the default glob pattern
        if catdir:
            self.catdir = catdir
        else:
            self.catdir = f'{self.base_dir}/{self.version}/{self.date}/sci/*/*/*sexcat_CalMag.fits'

        # Create the log directory
        os.makedirs(self.log_dir, exist_ok=True)  # create it if it does not exist
        log_file = os.path.join(self.log_dir, "{time:YYYY-MM-DD}.log")
        logger.add(log_file, rotation="00:00", retention="10 days", level="INFO", compression="zip")
        if path_cat:
            self.path_cat=path_cat
        else:
            self.path_cat=configpath.path_cat
        if path_xpcat:
            self.path_xpcat=path_xpcat
        else:
            self.path_xpcat=configpath.path_xpcat # the XP catalog tables also carry XP spectral-flux information
        
        # Make sure the output directory exists
        if not os.path.exists(self.filesavedir):
            os.makedirs(self.filesavedir, exist_ok=True)
        
        logger.info(f"filesavedir:{self.filesavedir }")
        logger.info(f"catdir:{self.catdir}")
        logger.info(f"path_cat:{self.path_cat}")
        logger.info(f"path_xpcat:{self.path_xpcat}")
        
    def rename_duplicates(self, my_list):
        """Return a copy of my_list in which repeated items get a numeric suffix.

        Items that occur once are kept unchanged; repeated items come back as
        "item_1", "item_2", ... in order of appearance.  Used to disambiguate
        duplicate band names when the same filter appears more than once.
        """
        counts = {}
        new_list = []

        for item in my_list:
            if item not in counts:
                counts[item] = 1
            else:
                counts[item] += 1
        num = list(counts.values())
        for item in my_list:
            if counts[item] == 1:
                new_list.append(item)
            else:
                counts[item] += 1
                # position of this item among the counted keys; the suffix is the
                # running count minus the total occurrences recorded in the first pass
                idx = np.where(np.array(list(counts.keys())) == item)[0][0]
                new_list.append(f"{item}_{counts[item] - int(num[idx])}")

        return new_list

    def sortband(self, a):
        # Sort key so catalogs are ordered u, v, g, r, i, z (unknown bands last).
        return {'u': 1, 'v': 2, 'g': 3, 'r': 4, 'i': 5, 'z': 6}.get(a, 7)

    def save_region_file(self, sources, filename, color, radiu):
        """Write a DS9 region file with one fk5 circle per source.

        Args:
            sources: table with ALPHA_J2000 / DELTA_J2000 columns (degrees).
            filename: output .reg path.
            color: DS9 region color name.
            radiu: circle radius in arcsec.
        """
        with open(filename, 'w') as f:
            f.write("# Region file format: DS9 version 4.1\n")
            f.write(f'global color={color} font="helvetica 10 normal" select=1 highlite=1 edit=1 move=1 delete=1 include=1 fixed=0 source\n')
            f.write("fk5\n")
            for ra, dec in zip(sources['ALPHA_J2000'].data, sources['DELTA_J2000'].data):
                f.write(f'circle({ra},{dec},{radiu}")\n')
                
                
    def airmassFunc(self,hour_angle,latitude,dec):
        """Return the airmass at a given hour angle.

        Args:
            hour_angle: hour angle in radians (scalar or array).
            latitude: site latitude in radians.
            dec: declination in radians (scalar or array).
        """
        #airmass at hour angle t
    
        x = 1/(np.sin(latitude)*np.sin(dec)+np.cos(latitude)*np.cos(dec)*np.cos(hour_angle))  #t is hour angle
        # polynomial correction to the plane-parallel sec(z) airmass
        # (presumably the Hardie-type expansion — confirm against the reference used)
        delta_x = 0.00186*(x-1)+0.002875*((x-1)**2)+0.0008083*((x-1)**3)
        airmass = x - delta_x
        #return float(airmass)
        return airmass

    def camFunc(self,obstime,exp_time,ra,dec):
        """Compute the effective airmass and mid-exposure times for an exposure.

        Args:
            obstime: exposure start time, ISOT UTC string "YYYY-MM-DDThh:mm:ss".
            exp_time: exposure time in seconds.
            ra, dec: coordinates in degrees (scalar or array).

        Returns:
            (ave_airmass, mid_iso, jd_mid, tdb_mid): Simpson-rule averaged
            airmass over the exposure, plus the mid-exposure ISO string,
            UT1 Julian date and TDB Julian date.
        """
        #ra = float(ra)*(np.pi/180)
        #dec = float(dec)*(np.pi/180)

        ra = ra*(np.pi/180)
        dec = dec*(np.pi/180)

        ###calculating the sidereal time at the beginning of observation###
        # Hard-coded site coordinates (100d01'48" E, 26d42'32" N) — presumably
        # the Mephisto/Lijiang observatory; TODO confirm.
        lon_deg,lon_min,lon_sec = 100,1,48
        longitude = (float(lon_deg)+float(lon_min)/60+float(lon_sec)/3600)
        lat_d,lat_arcm,lat_arcs = 26,42,32
        latitude = (float(lat_d)+float(lat_arcm)/60+float(lat_arcs)/3600)*(np.pi/180)
        time_standard = 'utc'

        #the jd/mgst/lmst/tdb at the beginning of exposure
        #obstime = '{}-{}-{}T{}:{}:{}'.format(int(year), int(month), int(day), int(hour), int(minute), float(second))
        scale = time_standard
        location = ('{}d'.format(str(longitude)),'{}d'.format(str(latitude/(np.pi/180))))
        obstime_start = Time(obstime,format="isot",scale=scale,location=location)

        ut1_start = obstime_start.ut1.iso
        jd_start = obstime_start.ut1.jd
        gmst_start = obstime_start.ut1.sidereal_time('mean', 'greenwich','IAU2006')
        lmst_start = obstime_start.ut1.sidereal_time('mean',model='IAU2006')
        tdb_start = obstime_start.ut1.tdb.jd#iso

        #the jd/mgst/lmst/tdb at the mid-point of exposure
        mean_second = exp_time/2
        delta_time_mid = TimeDelta(mean_second,format='sec')
        obstime_mid = obstime_start + delta_time_mid

        ut1_mid = obstime_mid.ut1.iso
        jd_mid = obstime_mid.ut1.jd
        gmst_mid = obstime_mid.ut1.sidereal_time('mean', 'greenwich','IAU2006')
        lmst_mid = obstime_mid.ut1.sidereal_time('mean',model='IAU2006')
        tdb_mid = obstime_mid.ut1.tdb.jd
        #tdb_mid = obstime_mid.ut1.tdb

        ###the hour angle (equal time interval)###
        # Sample n+1 hour angles evenly across the exposure.
        n = 5
        lmst_hour = np.array(lmst_start)*15*(np.pi/180)
        t0 = lmst_hour - ra  # initial hour angle h
        hour_angle = []
        for i in np.arange(0,(float(exp_time)+float(exp_time)/n),(float(exp_time)/n)):
                t = t0 + (i*15/3600)*(np.pi/180)
                hour_angle.append(t)

        ###method 1:calculating the airmass by chosing the average time point###
        t_start = hour_angle[0]     #the beginning point of hour angle of observation
        t_end = hour_angle[len(hour_angle)-1]    #the ending point of hour angle of observation
        ave_t1 = (t_end+t_start)/2

        # Simpson's rule over start / mid / end airmass values.
        airmass_start = self.airmassFunc(t_start,latitude,dec)
        airmass_end = self.airmassFunc(t_end,latitude,dec)
        airmass_mean = self.airmassFunc(ave_t1,latitude,dec)
        #ave_airmass = round((airmass_start + 4*airmass_mean + airmass_end)/6,4)
        ave_airmass = (airmass_start + 4*airmass_mean + airmass_end)/6
        # return ave_airmass,obstime_start.value,jd_start,tdb_start
        return ave_airmass,obstime_mid.value,jd_mid,tdb_mid #ave_airmass,obstime_mid.value,jd_mid,tdb_mid

    def meph_merge(self, catmerge):
        """Merge a group of simultaneous single-band catalogs into one table.

        Args:
            catmerge: list of *_sexcat_CalMag.fits paths observed simultaneously.

        Returns:
            (mergetable, mergetable_allband, fname): the left-join (union of
            sources) table, the inner-join (sources present in every band)
            table, and the output file name built from object name, filter
            string and start time.
        """
        # Stack all per-band detections into a single table.
        table=Table()
        for i in range(len(catmerge)):
               catalog=Table.read(catmerge[i],hdu=2)
               length=len(astropy.table.unique(catalog,'ALPHA_J2000'))
               #print(length)	
               if length==1:
                    catalog=astropy.table.unique(catalog,'ALPHA_J2000')
			
               table=vstack([table,catalog])

        # Group detections within self.sep arcsec into unique sources and use
        # the mean position of each group as the reference coordinate.
        ra = table['ALPHA_J2000'].data
        dec = table['DELTA_J2000'].data
        cood = SkyCoord(ra=ra * u.degree, dec=dec * u.degree, frame='icrs')
        idx1, idx2, d2d0, d3d0 = cood.search_around_sky(cood, seplimit=self.sep * u.arcsec)
        RA, DEC = [], []
        mask = np.ones(len(cood), dtype=bool)
        for j in range(len(ra)):
            if mask[j]:
                idx = np.where(idx1 == j)[0]
                RA.append(np.mean(ra[idx2[idx]]))
                DEC.append(np.mean(dec[idx2[idx]]))
                mask[idx2[idx]] = False
        
        cood1 = SkyCoord(ra=RA * u.degree, dec=DEC * u.degree, frame='icrs')
        table0 = Table()
        table0['STAR_ID'] = np.arange(len(RA))
        table0['ALPHA_J2000'] = RA
        table0['DELTA_J2000'] = DEC

        mergetable = table0
        mergetable_allband = table0
        # HDU 1 stores the raw image-header cards as strings in column 0;
        # locate the FILTER card to get each file's band.
        Band = []
        for si in range(len(catmerge)):
            catdata = fits.open(catmerge[si])
            headeri = catdata[1].data
            bandidx = np.where(headeri.field(0).find('FILTER')[0] == 0)[0][0]
            bandi = headeri.field(0)[0, bandidx].split('=')[-1].replace("'", "").strip()
            Band.append(bandi)
        
        # Re-order the input files (and their band labels) as u,v,g,r,i,z.
        catmerge = [(catmerge[i], self.sortband(Band[i])) for i in range(len(catmerge))]
        catmerge = sorted(catmerge, key=itemgetter(1))
        catmerge = [catmerge[i][0] for i in range(len(catmerge))]
        
        reband = [(Band[i], self.sortband(Band[i])) for i in range(len(catmerge))]
        reband = sorted(reband, key=itemgetter(1))
        reband = [reband[i][0] for i in range(len(reband))]
        reband = self.rename_duplicates(reband)

        objname, time_ser, Filter = [], [], []
        for mi in range(len(catmerge)):
            filenamei = catmerge[mi].split('/')[-1].split('.fits')[0]
            catdata = fits.open(catmerge[mi])
            headeri = catdata[1].data

            objnameidx = np.where(headeri.field(0).find('OBJECT')[0] == 0)[0][0]
            objnamei = headeri.field(0)[0, objnameidx].split('=')[-1].replace("'", "").strip()
            objname.append(objnamei)
            
            bandidx = np.where(headeri.field(0).find('FILTER')[0] == 0)[0][0]
            bandi = headeri.field(0)[0, bandidx].split('=')[-1].replace("'", "").strip()
            Filter.append(bandi)
            bandi = reband[mi]

            airmassidx = np.where(headeri.field(0).find('AIRMASS')[0] == 0)[0][0]
            airmassi = float(headeri.field(0)[0, airmassidx].split('=')[-1].replace("'", "").split('/')[0])

            # Observation start time: prefer the FRAME card, fall back to DATE.
            # NOTE(review): bare except — narrow to IndexError if possible.
            try:
                startimeidx = np.where(headeri.field(0).find('FRAME')[0] == 0)[0][0]
            except:
                startimeidx = np.where(headeri.field(0).find('DATE')[0] == 0)[0][0]

            startimei = headeri.field(0)[0, startimeidx].split('=')[-1].split('/')[0].replace("'", "")
            utc = Time(startimei.strip(), format='isot', scale='utc')
            headertimejd = utc.jd

            # Timestamp parsed from the file name (field 5, YYYYMMDDhhmmss...).
            filetime = catmerge[mi].split('/')[-1].split('_')[4]
            filetime = f"{filetime[0:4]}-{filetime[4:6]}-{filetime[6:8]}T{filetime[8:10]}:{filetime[10:12]}:{filetime[12:]}"
            fileutc = Time(filetime.strip(), format='isot', scale='utc')
            filetimejd = fileutc.jd
            # If header time and file-name time disagree by more than 10 minutes,
            # trust the file-name time.
            if abs(filetimejd - headertimejd) > 10 / 60 / 24:
                utcs = fileutc.value
            else:
                utcs = utc.value

            time_ser.append(utcs.replace('-', '').replace(':', '').replace('T', '').split('.')[0])

            exptimeidx = np.where(headeri.field(0).find('EXPOSURE')[0] == 0)[0][0]
            exptimei = float(headeri.field(0)[0, exptimeidx].split('=')[-1].split('/')[0].replace("'", ""))

            mtable0 = Table.read(catmerge[mi], hdu=2)
            rai=mtable0['ALPHA_J2000'].data
            deci=mtable0['DELTA_J2000'].data
            filename = [filenamei] * len(mtable0)
            exptime = [exptimei] * len(mtable0)
            startime = [utcs] * len(mtable0)
            #airmass = [airmassi] * len(mtable0)
            # Per-source airmass recomputed from time/coords rather than the header value.
            airmass=self.camFunc(utcs,exptimei,rai,deci)[0]
            columns0 = mtable0.colnames
            
            # Keep only the configured subset of columns, suffixed with the band name.
            mtable=Table()
            
            need_column=configpath.need_column
            
            
            for coi in range(len(need_column)):
                 mtable[need_column[coi]]=mtable0[need_column[coi]]
            
            columns = mtable.colnames
            columns = [columns[i] + '_' + bandi for i in range(len(columns))]
            mtable = Table(mtable, names=columns)
            mtable.add_columns([filename, startime, exptime, airmass], names=['filename_' + bandi, 'startime_' + bandi, 'exptime_' + bandi, 'airmass_' + bandi])
            # Assign each detection to the nearest merged reference source.
            coodi = SkyCoord(ra=mtable['ALPHA_J2000_' + bandi].data * u.degree, dec=mtable['DELTA_J2000_' + bandi].data * u.degree, frame='icrs')
            idx, d2d, d3d = match_coordinates_sky(coodi, cood1)
            mtable.add_column(idx, name='STAR_ID')

            # Union of sources (left join) vs. sources detected in every band (inner join).
            mergetable = join(mergetable, mtable, keys='STAR_ID', join_type='left')
            mergetable_allband = join(mergetable_allband, mtable, keys='STAR_ID')
  
        fname = f'sc_{objname[0]}_{"".join(Filter)}_{time_ser[0]}.fits'
        logger.warning(fname)
        logger.info(f'objname:{objname},Filter:{Filter},time_ser:{time_ser}')

        return mergetable, mergetable_allband,fname
    
    def findmergefile(self):
        """Query the observation database for groups of simultaneously observed files.

        Returns:
            A list of filename groups; each group contains the catalogs of one
            simultaneous multi-band exposure (within 5 seconds).
        """

        # NOTE(review): version is hard-coded to "V20250303" here instead of
        # using self.version — confirm whether that is intentional.
        finder = SimultaneousObservationFinder(db_url=configpath.db_url, date_str=self.date, delta_sec=5,version="V20250303")
        finder.run()
        all_filename_arrays=finder.print_groups()
        finder.check_grouping_coverage()
        
        finder.save_all_to_txt_detailed_with_summary(f"./log/output_data/{self.date}_simultaneous_observations.txt")

        return all_filename_arrays

    def match_gaiadr3_xp_1m6(self, source,mergetable):
        """Cross-match a merged catalog with Gaia DR3 and the XP catalog.

        Args:
            source: path of the merged catalog FITS file (used for its primary
                header and to derive the observation date and band list from
                the file name).
            mergetable: merged astropy Table to cross-match.

        Returns:
            A fits.HDUList with the cross-matched table, or None when the
            astrometry is invalid or no match / no XP data is found.
        """
        path_cat=self.path_cat
        path_xpcat=self.path_xpcat
        catdata = fits.open(source)
        filename = os.path.basename(source)
        startimei = filename.split('_')[-1].split('.fits')[0]
        obsdate=int(startimei[0:8])
        obsyear = int(startimei[0:4])
        obsmon = int(startimei[4:6])
        # Proper-motion reference epoch: use next year's positions after June.
        refyear = str(obsyear)
        if obsmon > 6:
            refyear = str(obsyear + 1)
            
            
        # Band letters come from the filter field of the file name.
        band=filename.split('_')[2]
        band=np.unique(np.array([i for i in band]))
        
        reband=[(band[i] ,self.sortband(band[i])) for i in range(len(band))]	
        reband=sorted(reband, key=itemgetter(1))
        reband=[reband[i][0] for i in range(len(reband))]
        bands=self.rename_duplicates(reband)

        data = mergetable
        ra1 = data['ALPHA_J2000'].data
        dec1 = data['DELTA_J2000'].data
        if (dec1 > 90.0).any() or (dec1 < -90.0).any() or (ra1 > 360.0).any() or (ra1 < 0.0).any():
            logger.warning('Astrometry error')
            return None
        ra_cen = np.median(ra1)
        dec_cen = np.median(dec1)
        c0 = SkyCoord(ra_cen * u.deg, dec_cen * u.deg, frame='icrs')
        c = SkyCoord(ra=np.array(ra1) * u.degree, dec=np.array(dec1) * u.degree, frame='icrs')

        # Select the Gaia sub-catalog tiles overlapping the field (±1 deg,
        # RA margin widened by cos(dec)).
        catinfo = fits.open(os.path.join(path_cat , 'gaiadr3catalog.info'))
        cramax = catinfo[1].data['ramax']
        cramin = catinfo[1].data['ramin']
        cdecmax = catinfo[1].data['decmax']
        cdecmin = catinfo[1].data['decmin']
        idxca = np.where((c0.ra.degree >= cramin - 1 / np.cos(np.radians(c0.dec.degree))) &
                         (c0.ra.degree <= cramax + 1 / np.cos(np.radians(c0.dec.degree))) &
                         (c0.dec.degree >= cdecmin - 1) & (c0.dec.degree <= cdecmax + 1))[0]

        catalogname = catinfo[1].data['name'][idxca]

        cattable = [Table()] * len(idxca)
        #logger.info(f"catalogname:{catalogname}")
        for i in range(len(idxca)):
            readCatalogFile=os.path.join(path_cat , catalogname[i])
            cattablei = Table.read(readCatalogFile)
            # Use epoch-propagated positions; 999.0 flags missing values, fall
            # back to the J2000 position for those rows.
            rai2000 = np.array(cattablei['ra'])
            deci2000 = np.array(cattablei['dec'])
            rai = np.array(cattablei['ra_J' + refyear])
            deci = np.array(cattablei['dec_J' + refyear])
            idx999 = np.where((rai == 999.0) | (deci == 999.0))[0]
            rai[idx999] = rai2000[idx999]
            deci[idx999] = deci2000[idx999]

            clog = SkyCoord(ra=rai * u.degree, dec=deci * u.degree)
            region = c0.separation(clog) < 40 * u.arcminute
            idxdi = np.where(region == True)[0]
            if len(idxdi) == 0:
                continue
            else:
                tablei = cattablei[idxdi]
                cattable[i] = tablei
        catalog = cattable[0]
        for j in range(1, len(cattable)):
            catalog = vstack([catalog, cattable[j]])
        if len(catalog) == 0:
            logger.info('No match catalog data')
            return None
        else:
            ra22000 = np.array(catalog['ra'])
            dec22000 = np.array(catalog['dec'])
            ra2 = np.array(catalog['ra_J' + refyear])
            dec2 = np.array(catalog['dec_J' + refyear])
            idx9992 = np.where((ra2 == 999.0) | (dec2 == 999.0))[0]
            ra2[idx9992] = ra22000[idx9992]
            dec2[idx9992] = dec22000[idx9992]

            clog = SkyCoord(ra=ra2 * u.degree, dec=dec2 * u.degree)
            idx, d2d, d3d = c.match_to_catalog_sky(clog)

            index = np.where(d2d.arcsec < self.match_r)[0]

            tablel = data[index]
            tabler0 = catalog[idx[index]]
            if len(tablel) == 0 or len(tabler0) == 0:
                logger.info('No match data')
                return None
            # NOTE(review): bare except — narrow to KeyError if possible.
            try:
                number = tablel['sourceid']
            except:
                number = tablel['STAR_ID']
            offset = d2d.arcsec[index]
            # Gaia columns to keep (add further needed columns here)
            need_column=['source_id','phot_g_mean_mag','phot_bp_mean_mag','phot_rp_mean_mag','in_galaxy_candidates','in_qso_candidates','parallax_over_error','parallax','parallax_error','has_xp_continuous']
            tabler=Table()
            for coi in range(len(need_column)):
                  tabler[need_column[coi]]=tabler0[need_column[coi]]
            
            #tabler.add_column(offset, name='sepdis')
            tabler.add_column(number, name='STAR_ID')
            
            
            
            newtable = join(data, tabler, keys='STAR_ID')


            logger.info(f'Original Num: {len(data)}, xmatch gaia result Num: {len(newtable)}')
        camera='mephisto'
        data = newtable
        ra=data['ALPHA_J2000'].data.data
        dec=data['DELTA_J2000'].data.data
        pix=hp.ang2pix(256,ra,dec,lonlat=True,nest=True)
        pixu=np.unique(pix)
        n=len(pixu)
        hasxp=np.array(data['has_xp_continuous']) 
        nxp=len(hasxp[hasxp==b'True'])
        
        path_catalog=glob.glob(path_xpcat+'*.fits')
        path_catalog=np.array([path_catalog[i].split('/')[-1] for i in range(len(path_catalog))])
        fnameu=catalogname
        idx=np.isin(path_catalog,fnameu)
        if len(idx[idx==True])==0:
               print('No XP spec in match')
               return None
        else:
		
               xtable=Table()

               for ti in range(len(fnameu)):
                       try:
                               tabi=Table.read(path_xpcat+fnameu[ti].decode('utf-8'))
                       except:
                               tabi=Table.read(path_xpcat+fnameu[ti])
                       xtabi=join(data,tabi,keys='source_id') 
                       #xtable=vstack([xtable,xtabi])

                       sourceid0=tabi['source_id'].data
                       sourceidi=xtabi['source_id'].data
                       idxi = np.isin(sourceid0, sourceidi)
                       xtabii=tabi[idxi]
                       xtable=vstack([xtable,xtabii])

               xtable0=Table()
               xtable0['source_id']=xtable['source_id']
               xtable0['ra']=xtable['ra']
               xtable0['dec']=xtable['dec']
               #xtable0['flux_fit']=xtable['flux_fit']
               #xtable0['flux_fit_err']=xtable['flux_fit_err']

               # XP synthetic-photometry columns depend on the camera in use at
               # the observation date (andor0 / andor1 / e2v eras).
               if camera=='mephisto' and obsdate<=20230308:
                       for bi in range(len(bands)):
                             xtable0['snr_XP_'+bands[bi]]=xtable['snr_XP_'+bands[bi]+'_e2v']
                             xtable0['xp_slope_'+bands[bi]]=xtable['k_'+bands[bi]+'_e2v']
                             # NOTE(review): key lacks the '_' before the band name,
                             # unlike the two branches below — likely a typo; confirm.
                             xtable0['xp_slope_err'+bands[bi]]=xtable['kerr_'+bands[bi]+'_e2v']
                             xtable0['abmag_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmag_'+'andor0']
                             xtable0['abmagerr_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmagerr_'+'andor0']
			
               if camera=='mephisto' and obsdate>20230308 and obsdate<=20231227:
                       for bi in range(len(bands)):
                             xtable0['snr_XP_'+bands[bi]]=xtable['snr_XP_'+bands[bi]+'_e2v']
                             xtable0['xp_slope_'+bands[bi]]=xtable['k_'+bands[bi]+'_e2v']
                             xtable0['xp_slope_err_'+bands[bi]]=xtable['kerr_'+bands[bi]+'_e2v']
                             xtable0['abmag_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmag_'+'andor1']
                             xtable0['abmagerr_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmagerr_'+'andor1']
               if camera=='mephisto' and obsdate>20231227:
                       for bi in range(len(bands)):
                             xtable0['snr_XP_'+bands[bi]]=xtable['snr_XP_'+bands[bi]+'_e2v']
                             xtable0['xp_slope_'+bands[bi]]=xtable['k_'+bands[bi]+'_e2v']
                             xtable0['xp_slope_err_'+bands[bi]]=xtable['kerr_'+bands[bi]+'_e2v']
                             xtable0['abmag_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmag_'+'e2v']
                             xtable0['abmagerr_xpsp_'+bands[bi]]=xtable[bands[bi]+'_abmagerr_'+'e2v']

               # Only the columns needed downstream are kept when joining with
               # the XP table (add extra columns here when required).
               xtable0['EBV_green19']=xtable['EBV_green19']
               xtable0['EBV_chen19']=xtable['EBV_chen19']
               xtable0['EBV_guo21']=xtable['EBV_guo21']
               xtable0['EBV_xiang24']=xtable['EBV_xiang24']
               xtable0['EBV_med']=xtable['EBV_med']
               xtable0['C2']=xtable['C2']
               xtable0['C3']=xtable['C3']
               xtable0['Caution']=xtable['Caution']
		
               #xtable=join(data,xtable,join_type='left',keys='source_id')
               xtable=join(data,xtable0,keys='source_id')
	
	
               if len(xtable) == 0:
                     logger.info('No XP spec in match')
                     return None
               else:
                     logger.info(f'Original gaia Num: {len(data)}, xmatch xp result Num: {len(xtable)}, has_xp_continuous num: {nxp}')

                     hdr0 = fits.PrimaryHDU(header=catdata[0].header)
                     hdr1 = fits.BinTableHDU(xtable)
                     xmatch_merge_xptab = fits.HDUList([hdr0, hdr1])
                     return xmatch_merge_xptab
                     
    def process(self,test=False,reDo=False):
        """Run the full merge + cross-match pipeline for self.date.

        Args:
            test: when True, skip the existence check of the sci directory.
            reDo: when True, rewrite merged catalogs even if they already exist.
        """
        start = time.time()
        if not test:
            if not os.path.exists(f"{self.base_dir}/{self.version}/{self.date}/sci"):
                logger.info(f"{self.base_dir}/{self.version}/{self.date}/sci not exist")
                sys.exit()

        mergefile = self.findmergefile()

        for mi in tqdm(range(len(mergefile))):
            # A group with a single file has nothing to merge.
            if len(mergefile[mi]) == 1:
                continue

            mergeobj = mergefile[mi][0].split('/')[-1].split('_')[2]
            logger.info(f'Object ({mergeobj}): {len(mergefile[mi])} merged files start merging.')
            logger.info('--------------------------------------------------------------------')
            logger.info(f"mergefile[mi]: {mergefile[mi]}")
            starteach=time.time()
            newtable, newtable_allband,savename = self.meph_merge(mergefile[mi])
            endeach = time.time()
            usetimeeach = endeach - starteach
            logger.info(f'This merge process took a total of {usetimeeach: .2f} seconds.')
            
            save_merge_catalog_file = os.path.join(self.filesavedir, savename)
            if not os.path.exists(save_merge_catalog_file) or reDo==True:
                logger.info(f"savename: {save_merge_catalog_file}")

                # Record the input file names in the primary header (FILE001, FILE002, ...).
                primary_hdu = fits.PrimaryHDU()
                for i, filepath in enumerate(mergefile[mi]):
                    key = f'FILE{i+1:03d}'
                    primary_hdu.header[key] = ntpath.basename(filepath)


                table_hdu = fits.BinTableHDU(newtable)
                hdul = fits.HDUList([primary_hdu, table_hdu])

                hdul.writeto(save_merge_catalog_file, overwrite=True)

            else:
                logger.warning(f"{save_merge_catalog_file} has already existed")

            save_merge_catalog_matchxp_file = os.path.join(self.filesavedir, savename.replace('.fits', '_gaia_xp.fits'))
            save_merge_catalog_matchxp_file_allsource = os.path.join(self.filesavedir, savename.replace('.fits', '_allsource_gaia_xp.fits'))
            if not os.path.exists(save_merge_catalog_matchxp_file_allsource):
                logger.info('Start xmatch with gaia and XP')
                startmatch = time.time()
            
                match_xptable = self.match_gaiadr3_xp_1m6(save_merge_catalog_file,newtable_allband) # intersection catalog x Gaia/XP
                match_xptable_allsource = self.match_gaiadr3_xp_1m6(save_merge_catalog_file,newtable) # union (all-source) catalog x Gaia/XP
                # NOTE(review): if match_xptable is truthy but match_xptable_allsource
                # is None, the second writeto below raises — confirm this cannot happen.
                if match_xptable:
                    match_xptable.writeto(save_merge_catalog_matchxp_file, overwrite=True)
                    match_xptable_allsource.writeto(save_merge_catalog_matchxp_file_allsource, overwrite=True)
                    endmatch = time.time()
                    usetimematch = endmatch - startmatch
                    # save DS9 region files
                    self.save_region_file(Table(match_xptable[1].data), os.path.join(self.filesavedir, savename.replace('.fits', '_gaia_xp.reg')), 'red', 4.5)
                    self.save_region_file(Table(match_xptable_allsource[1].data), os.path.join(self.filesavedir, savename.replace('.fits', '_allsource_gaia_xp.reg')), 'green', 4)
                    logger.info(f'This xmatch process took a total of {usetimematch: .2f} seconds.')
                else:
                    logger.info(f'No Xp')

        end = time.time()
        usetime = end - start
        logger.info(f'This merger process took a total of {usetime:.2f} seconds.')
        logger.info('--------------------------------------------------------------------')


# NOTE: argparse is imported once at the top of the file; the duplicate
# module-level import that used to live here was removed.

if __name__ == "__main__":
    # Build the command-line parser.
    parser = argparse.ArgumentParser(description="Merge catalogs based on observation date.")

    # --date: observation date to process (optional; has a default).
    parser.add_argument(
        '--date',
        type=str,
        default='2025-01-01',
        required=False,
        help='输入处理日期，格式为 YYYY-MM-DD (默认值: 2025-01-01)'
    )

    # --version: pipeline version tag (optional; has a default).
    parser.add_argument(
        '--version',
        type=str,
        default='V20250303',
        required=False,
        help='版本号 (默认值: V20250303)'
    )

    # --catdir: optional override of the input-catalog glob pattern.
    parser.add_argument(
        '--catdir',
        type=str,
        default=None,
        required=False,
        help='自定义的 catalog 文件目录路径 (默认值: None，使用 base_dir 下的默认通配路径)'
    )

    # --filesavedir: optional override of the output directory.
    parser.add_argument(
        '--filesavedir',
        type=str,
        default=configpath.filesavedir,
        required=False,
        help='自定义的输出文件保存目录路径 (默认值: configpath.filesavedir)'
    )

    # Parse the command line.
    args = parser.parse_args()

    # Build and run the CatalogMerger with the parsed (or default) options.
    merger = CatalogMerger(
        base_dir=configpath.base_dir,
        date=args.date,
        version=args.version,
        catdir=args.catdir,
        filesavedir=args.filesavedir,
        path_cat=None,
        path_xpcat=None
    )
    # NOTE(review): test=True skips the sci-directory existence check —
    # confirm this is intended for production runs.
    merger.process(test=True)
