from astropy.io import fits
from astropy.stats import mad_std
from astropy.visualization import ZScaleInterval
from photutils.detection import DAOStarFinder
from scipy import ndimage
import base64
import io
import json
import logging
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import os, datetime
import shutil
import zipfile
from hp_fixar_v01 import config


matplotlib.use('Agg')  # 使用非交互式后端

logger = logging.getLogger(__name__)

def estimate_source_radius(data, fwhm_guess=3.0, threshold_sigma=5.0, max_radius=20):
    """
    Estimate a typical bright-source radius (in pixels) for an image.

    Runs DAOStarFinder on the median-subtracted image and converts the median
    detected source area (``npix``) into an equivalent circular radius.

    Args:
        data (numpy.ndarray): 2-D image data.
        fwhm_guess (float, optional): FWHM guess passed to DAOStarFinder. Defaults to 3.0.
        threshold_sigma (float, optional): Detection threshold in units of the
            MAD-based background sigma. Defaults to 5.0.
        max_radius (int, optional): Upper clamp on the returned radius. Defaults to 20.

    Returns:
        int: Estimated radius clipped to [2, max_radius]; falls back to 3 when
        no sources are detected or the source table has no ``npix`` column.
    """
    bkg_sigma = mad_std(data)
    daofind = DAOStarFinder(fwhm=fwhm_guess, threshold=threshold_sigma * bkg_sigma)
    sources = daofind(data - np.median(data))

    if sources is None or len(sources) == 0:
        # Use the module logger (not print) so warnings reach the app log,
        # consistent with the rest of this module.
        logger.warning("photutils未检测到亮源，使用默认半径 r=3")
        return 3

    if 'npix' not in sources.colnames:
        logger.warning("源表中缺失 npix 字段，无法估计半径，使用默认 r=3")
        return 3

    areas = np.array(sources['npix'])
    estimated_radii = np.sqrt(areas / np.pi)
    median_r = np.median(estimated_radii)
    return int(np.clip(median_r, 2, max_radius))

def make_annular_kernel(size, r_inner, r_outer):
    """Build a float32 annulus mask of shape (size, size).

    A pixel is 1.0 where its distance from the central pixel lies in
    (r_inner, r_outer], and 0.0 everywhere else.
    """
    c = size // 2
    yy, xx = np.ogrid[:size, :size]
    # Compare squared distances to avoid the sqrt; equivalent for the
    # non-negative integer radii this module passes in.
    r2 = (xx - c) ** 2 + (yy - c) ** 2
    ring = (r2 <= r_outer ** 2) & (r2 > r_inner ** 2)
    return ring.astype(np.float32)

def detect_hot_pixels_v1(data, sigma_threshold=5, median_filter_size=3):
    """
    Detect hot pixels: method one (global statistics).

    The image is compared against a median-filtered version of itself; any
    pixel whose residual exceeds ``sigma_threshold`` times the global residual
    standard deviation is flagged.

    Args:
        data (numpy.ndarray): Input image data.
        sigma_threshold (float, optional): Sigma threshold for flagging. Defaults to 5.
        median_filter_size (int, optional): Median filter size. Defaults to 3.

    Returns:
        tuple: (hot_pixel_mask, ratio_map, local_mean, local_std)
            hot_pixel_mask: boolean hot-pixel mask
            ratio_map: concentration map (all "undefined" for this method)
            local_mean: filled with the global median of the data
            local_std: filled with the global residual standard deviation
    """
    smoothed = ndimage.median_filter(data, size=median_filter_size)
    deviation = data - smoothed
    global_std = np.std(deviation)
    hot_pixel_mask = deviation > sigma_threshold * global_std

    # Placeholders so the return signature matches detect_hot_pixels_v3.
    ratio_map = np.full_like(data, "undefined", dtype=object)
    local_mean = np.full_like(data, np.median(data))
    local_std = np.full_like(data, global_std)

    return hot_pixel_mask, ratio_map, local_mean, local_std

def detect_hot_pixels_v2(data, sigma_threshold=5, median_filter_size=3,
                         local_window=21, min_value=1000, max_cluster_size=3):
    """
    Detect hot pixels: method two (local statistics).

    Residuals against a median-filtered background are compared to a locally
    estimated mean and standard deviation; candidate clusters larger than
    ``max_cluster_size`` pixels are rejected as real sources.

    Args:
        data (numpy.ndarray): Input 2-D image data.
        sigma_threshold (float, optional): Local sigma threshold. Defaults to 5.
        median_filter_size (int, optional): Median filter size. Defaults to 3.
        local_window (int, optional): Local statistics window size. Defaults to 21.
        min_value (int, optional): Minimum raw pixel value to consider. Defaults to 1000.
        max_cluster_size (int, optional): Largest cluster kept as hot pixels. Defaults to 3.

    Returns:
        tuple: (hot_pixel_mask, ratio_map, local_mean, local_std)
            hot_pixel_mask: boolean hot-pixel mask
            ratio_map: concentration map (all "undefined" for this method)
            local_mean: local residual mean
            local_std: local residual standard deviation
    """
    smoothed = ndimage.median_filter(data, size=median_filter_size)
    residual = data - smoothed

    local_mean = ndimage.uniform_filter(residual, size=local_window, mode='reflect')
    local_std = ndimage.generic_filter(residual, np.std, size=local_window, mode='reflect')

    above_local = residual > (local_mean + sigma_threshold * local_std)
    candidate_mask = above_local & (data > min_value)

    labeled, num_features = ndimage.label(candidate_mask)
    sizes = ndimage.sum(candidate_mask, labeled, range(num_features + 1))

    # Keep only small clusters; a boolean lookup table indexed by label id
    # replaces the per-label Python loop (label 0 is background).
    keep = np.asarray(sizes) <= max_cluster_size
    keep[0] = False
    hot_pixel_mask = keep[labeled]

    # Placeholder so the return signature matches detect_hot_pixels_v3.
    ratio_map = np.full_like(data, "undefined", dtype=object)
    return hot_pixel_mask, ratio_map, local_mean, local_std

def detect_hot_pixels_v3(data, n_threshold=5, concentration_ratio=0.5,
                         inner_scale=3, outer_scale=6, use_absolute_diff=True):
    """
    Detect hot pixels: method three (annular local background + concentration).

    Local mean/std are estimated inside a source-scaled annulus around each
    pixel; a pixel is flagged when it is both far from its local background
    and carries most of the flux of its plus-shaped neighborhood.

    Args:
        data (numpy.ndarray): Input image data.
        n_threshold (float, optional): Background-sigma multiple threshold.
        concentration_ratio (float, optional): P/Q concentration threshold.
        inner_scale (int, optional): Inner annulus radius factor.
        outer_scale (int, optional): Outer annulus radius factor.
        use_absolute_diff (bool, optional): Use |data - mean| instead of the
            signed difference. Defaults to True.

    Returns:
        tuple: (hot_pixel_mask, ratio_map, local_mean, local_std)
    """
    radius = estimate_source_radius(data)
    r_in = int(inner_scale * radius)
    r_out = int(outer_scale * radius)
    kernel_size = 2 * r_out + 1

    ring = make_annular_kernel(kernel_size, r_in, r_out)
    n_ring = np.sum(ring)

    # Local mean/variance over the annulus via two convolutions.
    ring_sum = ndimage.convolve(data, ring, mode='reflect')
    ring_sq_sum = ndimage.convolve(data**2, ring, mode='reflect')

    local_mean = ring_sum / n_ring
    local_var = (ring_sq_sum / n_ring) - (local_mean ** 2)
    local_std = np.sqrt(np.clip(local_var, 1e-10, None))

    if use_absolute_diff:
        diff = abs(data - local_mean)
    else:
        diff = data - local_mean
    bright_enough = diff > (n_threshold * local_std)

    # Plus-shaped kernel: each pixel's flux vs. its immediate neighborhood.
    cross = np.array([[0, 1, 0], [1, 1, 1], [0, 1, 0]], dtype=np.float32)
    diff_neighborhood = ndimage.convolve(diff, cross, mode='reflect')

    with np.errstate(divide='ignore', invalid='ignore'):
        ratio = diff / diff_neighborhood
        ratio[diff_neighborhood == 0] = 0

    concentrated = ratio > concentration_ratio
    hot_pixel_mask = np.logical_and(bright_enough, concentrated)

    return hot_pixel_mask, ratio, local_mean, local_std

def replace_hot_pixels(data, hot_pixel_mask, replace_radius=3):
    """
    Replace flagged hot pixels with the median of nearby good pixels.

    Each hot pixel becomes the median of the non-hot pixels inside a
    (2*replace_radius+1)-sized box around it; if the box holds no good pixel,
    the global median of all good pixels is used instead.

    Args:
        data (numpy.ndarray): Input image data.
        hot_pixel_mask (numpy.ndarray): Boolean mask, True at hot pixels.
        replace_radius (int, optional): Half-size of the replacement box. Defaults to 3.

    Returns:
        numpy.ndarray: Corrected copy of the image.
    """
    result = data.copy()
    fallback = np.median(data[~hot_pixel_mask])
    height, width = data.shape

    for row, col in np.argwhere(hot_pixel_mask):
        r0 = max(0, row - replace_radius)
        r1 = min(height, row + replace_radius + 1)
        c0 = max(0, col - replace_radius)
        c1 = min(width, col + replace_radius + 1)

        patch = data[r0:r1, c0:c1]
        good = patch[~hot_pixel_mask[r0:r1, c0:c1]]
        result[row, col] = np.median(good) if good.size else fallback

    return result

def plot_to_base64(data, high_contrast=False):
    """
    Render image data as a base64-encoded PNG with no axes or title.

    Args:
        data (np.ndarray): 2-D image data.
        high_contrast (bool): If True, use a much harder stretch (intended for
            the hot-pixel map, which is mostly zeros).

    Returns:
        str: Base64-encoded PNG image.
    """
    try:
        if high_contrast:
            # Hard stretch: display range clamped to median ± 1.0 counts.
            # NOTE(review): the original comment described this as "±1%", but
            # the code applies an absolute offset of 1.0 — confirm intent.
            center = np.median(data)
            vmin = max(np.min(data), center - 1.0)
            vmax = min(np.max(data), center + 1.0)
        else:
            # Standard astronomical display stretch (IRAF-style zscale).
            vmin, vmax = ZScaleInterval().get_limits(data)
    except Exception:
        # Fallback: simple percentile stretch when the above fails.
        vmin, vmax = np.percentile(data, [5, 95])

    fig = plt.figure(figsize=(6, 6))
    ax = fig.add_subplot(111)
    ax.imshow(data, cmap='gray', origin='lower', vmin=vmin, vmax=vmax)
    ax.axis('off')

    # Serialize to an in-memory PNG; close the figure explicitly to free
    # memory under the non-interactive 'Agg' backend selected at module load.
    buf = io.BytesIO()
    fig.savefig(buf, format='png', bbox_inches='tight', dpi=120, pad_inches=0)
    plt.close(fig)
    buf.seek(0)
    return base64.b64encode(buf.read()).decode('ascii')

def generate_zip(files, base_folder):
    """
    Build an in-memory ZIP archive containing the given files.

    Args:
        files (list): File names to include in the archive.
        base_folder (str): Directory the files live in.

    Returns:
        io.BytesIO: Buffer (positioned at offset 0) holding the ZIP data, or
        None when none of the requested files exist on disk.
    """
    zip_buffer = io.BytesIO()
    added = 0
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zf:
        for filename in files:
            file_path = os.path.join(base_folder, filename)
            if os.path.isfile(file_path):
                logger.info(f'添加文件到压缩包: {file_path}')
                zf.write(file_path, arcname=filename)
                added += 1
            else:
                logger.warning(f'文件不存在，跳过: {file_path}')
    # BUG FIX: an empty ZIP archive still contains a ~22-byte end-of-central-
    # directory record, so the old `getbuffer().nbytes == 0` check never fired
    # and an empty archive was returned instead of None. Count added files.
    if added == 0:
        return None
    zip_buffer.seek(0)
    return zip_buffer

def force_numbered_name(filename, target_folder):
    """
    Generate a numbered file name that does not collide in the target folder.

    Args:
        filename (str): Original file name.
        target_folder (str): Destination folder path.

    Returns:
        str: A name of the form "<stem>_<i><ext>" with the smallest i >= 1
        that does not yet exist in target_folder.
    """
    # `os` is imported at module level; the former function-local
    # `import os` was redundant and has been removed.
    name, ext = os.path.splitext(filename)
    i = 1
    while True:
        candidate = f"{name}_{i}{ext}"
        if not os.path.exists(os.path.join(target_folder, candidate)):
            return candidate
        i += 1

def load_metadata(path):
    """
    Load the metadata file and return it as a plain list.

    Args:
        path (str): Path to the metadata JSON file.

    Returns:
        list: The metadata entries; an empty list when the file is missing,
        unparsable, or not in a recognized format.
    """
    if not os.path.exists(path):
        return []

    try:
        with open(path, 'r', encoding='utf-8') as fh:
            parsed = json.load(fh)
    except Exception as e:
        logger.error(f"加载metadata失败: {path} 错误: {e}")
        return []

    if isinstance(parsed, list):
        return parsed

    if isinstance(parsed, dict):
        # Backward compatibility with the old {'files': [...]} layout.
        entries = parsed.get('files')
        if isinstance(entries, list):
            return entries
        logger.warning(f"metadata文件中 'files' 字段格式错误: {path}")
    else:
        logger.warning(f"metadata文件格式不符合预期，既非list也非dict: {path}")

    return []

def write_metadata(path, data):
    """
    Persist the metadata file in the plain-list JSON format.

    Args:
        path (str): Path to the metadata file.
        data (list): Metadata entries to write.

    Returns:
        None
    """
    if not isinstance(data, list):
        logger.error(f"写入metadata失败，传入数据不是列表类型: {type(data)}")
        return

    try:
        with open(path, 'w', encoding='utf-8') as fh:
            json.dump(data, fh, indent=2, ensure_ascii=False)
    except Exception as e:
        logger.error(f"写入metadata失败: {path} 错误: {e}")

def clear_temp_results(temp_folder):
    """
    Remove every file, symlink, and subdirectory inside the given temp folder.

    Best-effort: a missing folder or a failure on one entry is swallowed so
    cleanup never interrupts request handling. BUG FIX: the old single
    try-block wrapped the whole loop, so the first failing entry aborted
    removal of all remaining entries; the try now guards each entry.
    """
    try:
        entries = os.listdir(temp_folder)
    except Exception:
        return
    for filename in entries:
        file_path = os.path.join(temp_folder, filename)
        try:
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                shutil.rmtree(file_path)
        except Exception:
            # Keep going: best-effort cleanup of the remaining entries.
            continue

def safe_float_format(value, fmt="{:.2f}"):
    """Format a number with *fmt*; return 'undefined' for non-numeric or non-finite values."""
    is_number = isinstance(value, (int, float, np.integer, np.floating))
    if is_number and np.isfinite(value):
        return fmt.format(value)
    return "undefined"

def process_task(filepath, filename, method, form_data, output_folder):
    """
    Process one FITS file end-to-end: detect hot pixels, replace them, and
    write the result files.

    Args:
        filepath (str): Full path of the already-saved input file; the file is
            deleted after successful processing.
        filename (str): Sanitized file name (no directory part).
        method (str): Detection method key: 'v1', 'v2', or 'v3'.
        form_data (dict): Parameters from request.form; the required keys
            depend on `method` (see the branches below).
        output_folder (str): Directory the three result files are written into.

    Returns:
        tuple:
            result (dict): Front-end display payload — base64 PNGs of the
                original, hot-pixel, and corrected images plus the count.
            metadata_entries (list[dict]): Metadata records for the three
                result files (corrected FITS, hot-pixel FITS, parameter report).

    Raises:
        ValueError: If `method` is not 'v1'/'v2'/'v3', or a form value is
            not numeric.
        KeyError: If a required form parameter is missing.
    """
    # Open the already-saved file; retry without memory mapping if the
    # memmapped open fails (presumably for compressed/scaled FITS — confirm).
    try:
        with fits.open(filepath, memmap=True) as hdul:
            data = hdul[0].data.astype(np.float32)
            header = hdul[0].header
    except Exception:
        with fits.open(filepath, memmap=False) as hdul:
            data = hdul[0].data.astype(np.float32)
            header = hdul[0].header

    # Observation metadata for the result records ('未知' means "unknown").
    field_id = header.get('FIELD_ID', '未知')
    obj_name = header.get('OBJECT', '未知')
    date_obs = header.get('DATE-OBS', '未知')

    # Select the parameter set and detector for the requested method.
    if method == 'v1':
        params = {
            'sigma_threshold': float(form_data['sigma_threshold']),
            'median_filter_size': int(form_data['median_size'])
        }
        mask, ratio_map, local_mean, local_std = detect_hot_pixels_v1(data, **params)
        method_name = '全局统计方法'
    elif method == 'v2':
        params = {
            'sigma_threshold': float(form_data['sigma_threshold']),
            'median_filter_size': int(form_data['median_size']),
            'local_window': int(form_data['local_window']),
            'min_value': float(form_data['min_value']),
            'max_cluster_size': int(form_data['max_cluster_size'])
        }
        mask, ratio_map, local_mean, local_std = detect_hot_pixels_v2(data, **params)
        method_name = '局部统计方法'
    elif method == 'v3':
        params = {
            'n_threshold': float(form_data['n_threshold']),
            'concentration_ratio': float(form_data['concentration_ratio']),
            'use_absolute_diff': form_data.get('use_absolute_diff', 'true').lower() == 'true'
        }
        mask, ratio_map, local_mean, local_std = detect_hot_pixels_v3(data, **params)
        method_name = '锐度统计方法'
    else:
        raise ValueError("无效检测方法")

    # Correct the image and build a map holding only the flagged pixels.
    corrected = replace_hot_pixels(data, mask, replace_radius=config.REPLACE_RADIUS)
    hot_pixel_data = np.zeros_like(data)
    hot_pixel_data[mask] = data[mask]

    # Output file names derived from the input name.
    base_name = os.path.splitext(filename)[0]
    corrected_name = f"{base_name}_hp.fits"
    hot_pixel_name = f"{base_name}_hotpixels.fits"
    params_name = f"{base_name}_hotpixels_params.txt"

    corrected_path = os.path.join(output_folder, corrected_name)
    hot_pixel_path = os.path.join(output_folder, hot_pixel_name)
    params_path = os.path.join(output_folder, params_name)

    fits.writeto(corrected_path, corrected, overwrite=True)
    fits.writeto(hot_pixel_path, hot_pixel_data, overwrite=True)

    # Per-pixel statistics for the report.
    coords = np.argwhere(mask)
    hot_pixel_values = data[mask]
    replaced_values = corrected[mask]
    ratios = ratio_map[mask]
    bkgs = local_mean[mask]
    stds = local_std[mask]
    # Detection significance in local sigmas; 1e-6 avoids division by zero.
    confidence = (hot_pixel_values - bkgs) / (stds + 1e-6)

    # Sort all per-pixel arrays by descending original brightness.
    sort_idx = np.argsort(-hot_pixel_values)
    coords = coords[sort_idx]
    hot_pixel_values = hot_pixel_values[sort_idx]
    replaced_values = replaced_values[sort_idx]
    ratios = ratios[sort_idx]
    bkgs = bkgs[sort_idx]
    stds = stds[sort_idx]
    confidence = confidence[sort_idx]

    # Write the human-readable parameter/detection report.
    # NOTE(review): the report title contains a literal "(unknown)" — confirm
    # whether the input filename was meant to appear here.
    with open(params_path, 'w') as f:
        f.write(f"热像素检测报告 - (unknown)\n")
        f.write("=" * 40 + "\n")
        f.write(f"检测时间: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        f.write(f"检测方法: {method_name}\n")
        f.write(f"总热像素点数: {len(coords)}\n\n")

        f.write("检测参数:\n" + "-" * 40 + "\n")
        for key, value in params.items():
            f.write(f"{key:20}: {value}\n")

        f.write("\n热像素点详细信息 (按亮度降序排列):\n" + "-" * 40 + "\n")
        f.write("序号    X坐标    Y坐标    原始值    替换值    锐度比    局部均值    局部Std    置信度\n" + "-" * 40 + "\n")

        # safe_float_format guards against the "undefined" entries produced
        # by detection methods v1/v2 (which have no real ratio/local stats).
        for i, (y, x) in enumerate(coords):
            f.write(f"{i+1:<6} {x:<8} {y:<8} {hot_pixel_values[i]:<9.2f} {replaced_values[i]:<9.2f} "
                    f"{safe_float_format(ratios[i], '{:<9.3f}')} {safe_float_format(bkgs[i], '{:<11.2f}')} "
                    f"{safe_float_format(stds[i], '{:<10.2f}')} {safe_float_format(confidence[i], '{:<9.2f}')}\n")

    # Payload for the front end: base64 previews plus the detection count.
    result = {
        'filename': filename,
        'original_image': plot_to_base64(data), #f'Original - (unknown)'),
        'hot_pixel_image': plot_to_base64(hot_pixel_data,high_contrast=True), #f'Hot Pixels - (unknown)'),
        'corrected_image': plot_to_base64(corrected), #f'Corrected - (unknown)'),
        'hot_pixel_count': len(coords)
    }

    # One metadata record per output file, sharing the observation info.
    metadata_entries = []
    for name, file_type, desc in [
        (corrected_name, "corrected", "修正后的FITS图像"),
        (hot_pixel_name, "hotpixels", "热像素标记图像"),
        (params_name, "params", "热像素检测参数报告")
    ]:
        metadata_entries.append({
            'filename': name,
            'base_name': os.path.splitext(name)[0],
            'field_id': field_id,
            'object': obj_name,
            'date_obs': date_obs,
            'method': method_name,
            'params': params,
            'type': file_type,
            'description': desc,
            'file_extension': os.path.splitext(name)[-1]
        })

    # The uploaded input is no longer needed once results are written.
    if os.path.exists(filepath):
        os.remove(filepath)

    return result, metadata_entries

def allowed_file(filename, allowed_extensions={'fits', 'fit'}):
    """Return True when *filename* has an extension in *allowed_extensions* (case-insensitive)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in allowed_extensions




