import os
import time
import json
import glob
import fcntl
import logging
import signal
import astropy.io.fits as fits
from datetime import datetime, timezone

# Health-state monitoring for the polling loop.
class HealthMonitor:
    """Tracks the time of the last loop activity and a running flag."""

    def __init__(self):
        self.running = True
        self.last_activity = datetime.now(timezone.utc)

    def update_activity(self):
        """Record that the monitoring loop is still making progress."""
        self.last_activity = datetime.now(timezone.utc)

    def check_health(self):
        """Return True while the last activity happened under 60 seconds ago."""
        idle = datetime.now(timezone.utc) - self.last_activity
        return idle.total_seconds() < 60

    def shutdown(self):
        """Ask the monitoring loop to stop after its current iteration."""
        self.running = False

# Module-wide monitor instance shared by the signal handlers and the main loop.
health_monitor = HealthMonitor()

def signal_handler(sig, frame):
    """Request a graceful shutdown of the monitoring loop on SIGINT/SIGTERM."""
    health_monitor.shutdown()
    logging.info("接收到终止信号，正在优雅关闭...")

# Install the handler for Ctrl-C and service termination signals.
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)

# Configure root-logger file logging (INFO and above, timestamped lines).
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    filename='/home/gwac/dwl/fit_monitor.log'
)
logger = logging.getLogger(__name__)

def load_config(config_path=None):
    """Load the JSON configuration file and fill in default values.

    Args:
        config_path: path to the JSON config file; defaults to ``config.json``
            next to this module (the original hard-coded location).

    Returns:
        dict with the file's settings plus defaults for ``scan_interval`` (1),
        ``max_retries`` (3) and ``retry_delay`` (5) when absent.

    Raises:
        OSError: the file is missing or unreadable (logged before re-raising).
        json.JSONDecodeError: the file is not valid JSON (logged, re-raised).
    """
    if config_path is None:
        config_path = os.path.join(os.path.dirname(__file__), 'config.json')
    try:
        # encoding is pinned so non-ASCII config values read identically
        # regardless of the platform's default locale.
        with open(config_path, encoding='utf-8') as f:
            config = json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        logging.error(f"加载配置文件失败: {str(e)}")
        raise
    # Fill in defaults for the optional polling/retry settings.
    config.setdefault('scan_interval', 1)
    config.setdefault('max_retries', 3)
    config.setdefault('retry_delay', 5)
    return config

# Configuration is loaded at import time; PROCESS_DIR is the root directory
# the pipeline writes its per-day, per-field step outputs into.
config = load_config()
PROCESS_DIR = config['processing_root']
# Names of the pipeline's processing-step subdirectories; each image is
# checked for an output file under every one of these.
STEPS = [
    "A_cat", "A_img_orig", "B_align_template_cat", "C_align",
    "D_combine", "E_diff_align", "F_diff", "G_diff_cat",
    "H_wcs_template", "I_resi_object_win", "J_resi_object_classify"
]

def getCurrentDayStr():
    """Return the current UTC date as a six-character 'YYMMDD' string."""
    return datetime.now(timezone.utc).strftime('%y%m%d')

def getTodayStorePathBySearch(rootPath, curDateStr=None):
    """Search rootPath for today's observation directory.

    A directory name matches when it contains the date string, has the same
    length as the template name ``G005_054_241019`` and splits into the same
    number of underscore-separated fields.

    Args:
        rootPath: root directory of the camera server's observation storage.
        curDateStr: 'YYMMDD' date string to look for; defaults to the current
            UTC day (backward-compatible with the original zero-arg call).

    Returns:
        (found, path): ``found`` is True when a matching directory exists and
        ``path`` is ``"<rootPath>/<dirname>"`` for the first match, otherwise
        (False, "").
    """
    template = "G005_054_241019"  # example of the expected directory name
    expected_len = len(template)
    expected_parts = len(template.split("_"))
    if curDateStr is None:
        curDateStr = getCurrentDayStr()

    for root in [rootPath]:
        if not os.path.exists(root):
            continue
        for obsDay in os.listdir(root):
            # BUGFIX: the original used `obsDay.find(curDateStr) > 0`, which
            # silently rejected names with the date at position 0; membership
            # accepts the date anywhere in the name.
            if (curDateStr in obsDay
                    and len(obsDay) == expected_len
                    and len(obsDay.split("_")) == expected_parts):
                return True, f"{root}/{obsDay}"
    return False, ""

def check_processing_steps(field_id, filename_prefix):
    """Report which pipeline steps have produced output for one image.

    Looks under ``PROCESS_DIR/<today>/*_<field_id>_*`` for a per-step
    subdirectory containing a file starting with ``filename_prefix``.

    Returns:
        dict mapping each step name in STEPS to "yes" (output file present)
        or "none" (no field directory, or no output for that step).
    """
    field_pattern = f"{PROCESS_DIR}/{getCurrentDayStr()}/*_{field_id}_*"
    matches = glob.glob(field_pattern)
    if not matches:
        # No field directory for today: every step is reported as missing.
        return {step: "none" for step in STEPS}
    field_root = matches[0]
    return {
        step: "yes" if glob.glob(f"{field_root}/{step}/{filename_prefix}*") else "none"
        for step in STEPS
    }

def get_file_mtime(file_path):
    """Return the file's modification time, or 0 when it cannot be stat'ed."""
    try:
        mtime = os.path.getmtime(file_path)
    except OSError:
        return 0
    return mtime

def monitor_fits_images(obs_dir):
    """Poll obs_dir for new or updated ``.fit`` files and maintain a JSON report.

    Each scan: detect files whose mtime changed since the last pass, read each
    one's FITS header, query the per-step pipeline status, and merge the
    results into ``/home/gwac/dwl/<YYMMDD>.json`` (protected by ``fcntl``
    file locks). Runs until ``health_monitor.running`` is cleared by a signal,
    or until ``config['max_retries']`` consecutive scan-level errors occur.
    """
    processed_files = {}  # filename -> last seen mtime; skips unchanged files
    retry_count = 0
    
    while health_monitor.running:
        health_monitor.update_activity()
        try:
            if not os.path.exists(obs_dir):
                time.sleep(1)
                continue
                
            current_files = [f for f in os.listdir(obs_dir) if f.endswith('.fit')]
            new_or_updated = []
            
            # Detect files that are new or have been modified since last scan.
            for f in current_files:
                file_path = os.path.join(obs_dir, f)
                current_mtime = get_file_mtime(file_path)
                if f not in processed_files or processed_files[f] < current_mtime:
                    new_or_updated.append(f)
                    processed_files[f] = current_mtime
            
            # Try to read the existing JSON report so prior entries are kept.
            output_file = f"/home/gwac/dwl/{getCurrentDayStr()}.json"
            report_data = {}
            if os.path.exists(output_file):
                try:
                    with open(output_file, 'r') as f:
                        fcntl.flock(f, fcntl.LOCK_SH)
                        report_data = json.load(f)
                        fcntl.flock(f, fcntl.LOCK_UN)
                except (IOError, json.JSONDecodeError) as e:
                    logger.error(f"读取报告文件失败: {str(e)}")
            
            # Only process files that are new or changed in this scan.
            for fits_file in new_or_updated:
                file_path = os.path.join(obs_dir, fits_file)
                try:
                    # memmap=False reads the file up front, improving
                    # performance for large files consumed in one pass.
                    with fits.open(file_path, memmap=False) as hdul:
                        header = hdul[0].header
                        field_id = header.get('FIELD_ID', 'UNKNOWN')
                        date_obs = header.get('DATE-OBS', 'UNKNOWN')
                        time_obs = header.get('TIME-OBS', 'UNKNOWN')
                        
                        filename_prefix = fits_file.split('.')[0]
                        step_status = check_processing_steps(field_id, filename_prefix)
                        
                        report_data[fits_file] = {
                            "DATE-OBS": date_obs,
                            "TIME-OBS": time_obs,
                            "steps": step_status,
                            "last_processed": datetime.now(timezone.utc).isoformat()
                        }
                except Exception as e:
                    # A corrupt/partial FITS file must not stop the scan; the
                    # failure is recorded in the report instead.
                    logger.error(f"处理文件{fits_file}失败: {str(e)}")
                    report_data[fits_file] = {
                        "error": str(e),
                        "last_attempt": datetime.now(timezone.utc).isoformat()
                    }
            
            output_file = f"/home/gwac/dwl/{getCurrentDayStr()}.json"
            try:
                with open(output_file, 'w') as f:
                    fcntl.flock(f, fcntl.LOCK_EX)  # acquire exclusive file lock
                    json.dump(report_data, f, indent=4)
                    fcntl.flock(f, fcntl.LOCK_UN)  # release file lock
                logger.info(f"成功写入报告文件: {output_file}")
            except IOError as e:
                logger.error(f"写入报告文件失败: {str(e)}")
                
            time.sleep(config['scan_interval'])
            retry_count = 0  # a successful scan resets the retry counter
        except Exception as e:
            logger.error(f"监控过程中发生错误: {str(e)}", exc_info=True)
            retry_count += 1
            if retry_count >= config['max_retries']:
                logger.error(f"达到最大重试次数({config['max_retries']})，停止监控")
                break
            time.sleep(config['retry_delay'])

if __name__ == "__main__":
    # Entry point: ask the operator for the camera server's observation root,
    # locate today's observation directory, and start the monitoring loop.
    camera_root = input("请输入相机服务器观测目录的根路径: ")
    found_today, observation_dir = getTodayStorePathBySearch(camera_root)
    if not found_today:
        print("未找到当天的观测目录")
    else:
        print(f"开始监控目录: {observation_dir}")
        monitor_fits_images(observation_dir)