import subprocess
import sys
import os
from pathlib import Path
# Add the project root (parent of this file's directory) to sys.path so
# sibling packages (utils, configpath) resolve when run as a script.
current_dir = Path(__file__).resolve().parent
parent_dir = current_dir.parent
sys.path.append(str(parent_dir))

import argparse
import ntpath
from operator import itemgetter
import numpy as np
import sys, glob, os
import time
from astropy.table import Table, hstack, join, vstack
from astropy.time import TimeDelta
from astropy.io import fits
from astropy import units as u
from astropy.coordinates import match_coordinates_sky, SkyCoord
from astropy.time import Time
from tqdm import tqdm
from loguru import logger
import healpy as hp
from utils.Merge_catalog_Main import CatalogMerger
import configpath

from concurrent.futures import ProcessPoolExecutor, as_completed

class CatalogMerger_parallel(CatalogMerger):
    """Parallel variant of CatalogMerger.

    Overrides the merge driver so each group of catalog files is handled
    in its own worker process via ProcessPoolExecutor, then records a
    completion flag file once all groups finish.
    """

    def process_one_group(self, file_group):
        """Merge one group of catalog files and cross-match with Gaia XP.

        Writes the merged catalog FITS file (recording every contributing
        file name in the primary header), then cross-matches both the
        all-band table and the full table against Gaia DR3 XP, saving the
        match tables and DS9 region files alongside.

        Parameters
        ----------
        file_group : list of str
            Paths of the catalog files for a single object. A group of
            size 1 is skipped — there is nothing to merge.
        """
        if len(file_group) == 1:
            return

        # Object name: third '_'-separated token of the basename
        # (paths are POSIX-style here, hence the '/' split).
        mergeobj = file_group[0].split('/')[-1].split('_')[2]
        logger.info(f'Object ({mergeobj}): {len(file_group)} merged files start merging.')
        logger.info('--------------------------------------------------------------------')
        logger.info(f"mergefile: {file_group}")
        starteach = time.time()

        newtable, newtable_allband, savename = self.meph_merge(file_group)

        save_merge_catalog_file = os.path.join(self.filesavedir, savename)
        if not os.path.exists(save_merge_catalog_file):
            logger.info(f"savename: {save_merge_catalog_file}")

            # Record each input file in the primary header for provenance.
            primary_hdu = fits.PrimaryHDU()
            for i, filepath in enumerate(file_group):
                key = f'FILE{i+1:03d}'
                primary_hdu.header[key] = ntpath.basename(filepath)

            table_hdu = fits.BinTableHDU(newtable)
            hdul = fits.HDUList([primary_hdu, table_hdu])
            hdul.writeto(save_merge_catalog_file, overwrite=True)

        else:
            logger.warning(f"{save_merge_catalog_file} has already existed")

        # Cross-match with Gaia XP; skipped when the all-source match
        # product already exists.
        save_match_file = save_merge_catalog_file.replace('.fits', '_gaia_xp.fits')
        save_match_all_file = save_merge_catalog_file.replace('.fits', '_allsource_gaia_xp.fits')

        if not os.path.exists(save_match_all_file):
            logger.info('Start xmatch with gaia and XP')
            match_xptable = self.match_gaiadr3_xp_1m6(save_merge_catalog_file, newtable_allband)
            match_xptable_allsource = self.match_gaiadr3_xp_1m6(save_merge_catalog_file, newtable)

            if match_xptable:
                match_xptable.writeto(save_match_file, overwrite=True)
                match_xptable_allsource.writeto(save_match_all_file, overwrite=True)

                self.save_region_file(Table(match_xptable[1].data), save_match_file.replace('.fits', '.reg'), 'red', 4.5)
                self.save_region_file(Table(match_xptable_allsource[1].data), save_match_all_file.replace('.fits', '.reg'), 'green', 4)
                logger.info(f"Xmatch done: {save_match_file}")
            else:
                logger.info(f"No XP matched for {savename}")

        endeach = time.time()
        logger.info(f'This merge took {endeach - starteach:.2f} s.')

    def process(self, test=False, reDo=False, max_workers=30):
        """Run all merge groups in a process pool and write a flag file.

        Parameters
        ----------
        test : bool
            When True, skip the existence check on the input directory.
        reDo : bool
            Accepted for interface compatibility; not referenced in this
            implementation.
        max_workers : int
            Size of the process pool (default 30, the previously
            hard-coded value).
        """
        start = time.time()

        if not test:
            input_dir = f"{self.base_dir}/{self.version}/{self.date}/sci"
            if not os.path.exists(input_dir):
                logger.info(f"{input_dir} not exist")
                sys.exit()

        # Only groups with at least two files need merging.
        mergefile = self.findmergefile()
        mergefile = [group for group in mergefile if len(group) > 1]

        success_count = 0
        fail_count = 0

        # NOTE(review): `self` is pickled into each worker process along
        # with the submitted method — confirm instances stay picklable.
        with ProcessPoolExecutor(max_workers=max_workers) as executor:
            futures = {executor.submit(self.process_one_group, group): group for group in mergefile}
            for f in tqdm(as_completed(futures), total=len(futures)):
                try:
                    f.result()
                    success_count += 1
                except Exception as e:
                    logger.error(f"❌ Exception in parallel processing: {e}")
                    fail_count += 1

        end = time.time()
        logger.info(f'🎉 All merge tasks completed in {end - start:.2f} s.')
        logger.info(f"✅ 成功 {success_count} 组，❌ 失败 {fail_count} 组")

        # ===== Write the completion flag file =====
        flag_dir = Path(self.filesavedir)
        flag_dir.mkdir(parents=True, exist_ok=True)
        flag_file = flag_dir / f"MERGE_DONE_{self.date}_{self.version}.flag"
        with open(flag_file, "w") as f:
            f.write(f"Date={self.date}\n")
            f.write(f"Version={self.version}\n")
            f.write(f"Completed at {time.strftime('%Y-%m-%d %H:%M:%S')}\n")
            f.write(f"Success groups: {success_count}\n")
            f.write(f"Failed groups: {fail_count}\n")
        logger.info(f"📄 标志文件已生成: {flag_file}")

if __name__ == "__main__":
    # argparse is already imported at the top of the file; the duplicate
    # module-level import that used to live here was removed.
    parser = argparse.ArgumentParser(description="Merge catalogs based on observation date.")

    # Processing date; optional with a default value.
    parser.add_argument(
        '--date',
        type=str,
        default='2025-01-01',
        required=False,
        help='输入处理日期，格式为 YYYY-MM-DD (默认值: 2025-01-01)'
    )

    # Pipeline version tag; optional with a default value.
    parser.add_argument(
        '--version',
        type=str,
        default='V20250303',
        required=False,
        help='版本号 (默认值: V20250303)'
    )

    # Optional custom catalog directory; None lets the merger pick its own.
    parser.add_argument(
        '--catdir',
        type=str,
        default=None,
        required=False,
        help='自定义的 catalog 文件目录路径 (默认值: None)'
    )

    # Optional custom output directory, defaulting to the configured path.
    parser.add_argument(
        '--filesavedir',
        type=str,
        default=configpath.filesavedir,
        required=False,
        help='自定义的输出文件保存目录路径 (默认值: configpath.filesavedir)'
    )

    args = parser.parse_args()

    # Build the parallel merger from config plus the parsed arguments.
    merger = CatalogMerger_parallel(
        base_dir=configpath.base_dir,
        date=args.date,
        version=args.version,
        catdir=args.catdir,
        filesavedir=args.filesavedir,
        path_cat=None,
        path_xpcat=None
    )
    # NOTE(review): test=True skips the input-directory existence check —
    # confirm this is intended for production runs.
    merger.process(test=True)
