import os
import numpy as np
import pandas as pd
import nibabel as nib
from tqdm import tqdm
import scipy.ndimage
import logging

# Console logging; main() later attaches a FileHandler so the run is also
# persisted under the output directory.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

def safe_load_nifti(path):
    """Load a NIfTI file, returning None instead of raising on failure.

    Any exception from nibabel is logged so the caller can skip the
    offending file and keep processing the rest of the batch.
    """
    try:
        return nib.load(path)
    except Exception as e:
        logger.error(f"Error loading {path}: {str(e)}")
        return None

def resample_to_isotropic(data, original_spacing, target_spacing=(1.0, 1.0, 1.0)):
    """Resample an anisotropic 3D volume to isotropic voxels.

    Args:
        data: 3D ndarray. Floating-point arrays are treated as intensity
            images (cubic-spline interpolation); any other dtype is treated
            as a label map (nearest-neighbour, so no new label values appear).
        original_spacing: (x, y, z) voxel spacing in mm of ``data``.
        target_spacing: desired (x, y, z) spacing in mm, default 1 mm isotropic.

    Returns:
        Tuple of (resampled array, target_spacing).
    """
    # Zoom factor per axis: how many output voxels per input voxel.
    scale_factors = [
        orig / tgt for orig, tgt in zip(original_spacing, target_spacing)
    ]

    # Cubic spline for intensity images, nearest-neighbour for label maps.
    # Checking for "any floating dtype" (not just float32) avoids silently
    # degrading a float64 image to order-0 interpolation.
    order = 3 if np.issubdtype(data.dtype, np.floating) else 0

    resampled = scipy.ndimage.zoom(data, scale_factors, order=order)
    return resampled, target_spacing

def crop_around_center(resampled_data, center_voxel, crop_size=64):
    """Extract a cube of side ``crop_size`` centred on ``center_voxel``.

    The desired window along each axis is
    ``[center - crop_size//2, center - crop_size//2 + crop_size)``.
    Where that window runs past the volume boundary, the missing voxels are
    zero-padded on the corresponding side, so the centre voxel always lands
    at index ``crop_size // 2`` of the output.

    Fixes the original behaviour, which always padded on the *low* side
    (and never padded when the window underran index 0), shifting the
    fracture centroid away from the cube centre near volume edges.

    Args:
        resampled_data: 3D ndarray to crop from.
        center_voxel: (x, y, z) integer-like voxel coordinates of the centre.
        crop_size: edge length of the output cube in voxels.

    Returns:
        ndarray of shape (crop_size, crop_size, crop_size).
    """
    half = crop_size // 2
    start_coords = []
    end_coords = []
    pad_width = []

    for axis in range(3):
        desired_start = int(center_voxel[axis]) - half
        desired_end = desired_start + crop_size
        start = max(0, desired_start)
        end = min(resampled_data.shape[axis], desired_end)
        start_coords.append(start)
        end_coords.append(end)
        # Pad exactly the amount the desired window overran each boundary.
        pad_width.append((start - desired_start, desired_end - end))

    crop = resampled_data[
        start_coords[0]:end_coords[0],
        start_coords[1]:end_coords[1],
        start_coords[2]:end_coords[2]
    ]

    return np.pad(crop, pad_width, mode='constant')
def main():
    """Extract 64 mm^3 isotropic fracture patches from the RibFrac training set.

    For each CT/label pair listed in the metadata CSV: resample both volumes
    to 1 mm isotropic spacing (cubic spline for the image, nearest-neighbour
    for the labels), crop a 64-voxel cube around each fracture centroid, and
    save the patches as .npy files alongside a per-patch metadata CSV.
    Missing or unreadable files are logged and skipped, never fatal.
    """

    image_dir = os.path.join('../../../data_2/lzy/ribfrac-train-images/ribfrac-train-images')  # CT image directory
    label_dir = os.path.join('../../../data_2/lzy/ribfrac-train-images/ribfrac-train-labels')  # annotation directory
    csv_path =  os.path.join('../../../data_2/lzy/ribfrac-train-images/ribfrac-train-info.csv')  # metadata CSV file
    output_dir = os.path.join('./ribfrac-patches/z_spacing')
    
    for subdir in ['images', 'labels', 'metadata', 'logs']:
        os.makedirs(os.path.join(output_dir, subdir), exist_ok=True)
    
    # Mirror console logging into a persistent file under the output directory.
    log_file = os.path.join(output_dir, 'logs', 'processing_log.txt')
    file_handler = logging.FileHandler(log_file)
    file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
    logger.addHandler(file_handler)
    
    logger.info("Starting processing with spatial normalization")
    logger.info(f"Target output directory: {output_dir}")
    
    if not os.path.exists(csv_path):
        logger.error(f"Metadata CSV not found at {csv_path}")
        return
    
    df = pd.read_csv(csv_path)
    # Keep only rows whose label_code is a fracture class of interest.
    valid_df = df[df['label_code'].isin([1, 2, 3, 4, -1])]
    
    # Group annotations by scan id so each NIfTI pair is loaded only once.
    fracture_dict = {}
    for pid, group in valid_df.groupby('public_id'):
        fracture_dict[pid] = {
            'label_ids': group['label_id'].tolist(),
            'label_codes': group['label_code'].tolist()
        }
    
    patch_info = []
    skipped_files = []
    target_spacing = (1.0, 1.0, 1.0)  # target isotropic spacing (1 mm^3 voxels)
    crop_size = 64  # uniform crop edge length in voxels (= mm after resampling)
    
    for public_id, frac_info in tqdm(fracture_dict.items(), desc='Processing CT scans'):
        # File naming convention assumed: '<public_id>-image.nii.gz' / '-label.nii.gz'.
        img_path = os.path.join(image_dir, f'{public_id}-image.nii.gz')
        label_path = os.path.join(label_dir, f'{public_id}-label.nii.gz')
        
        if not os.path.exists(img_path):
            msg = f"CT file missing: {img_path}"
            logger.error(msg)
            skipped_files.append(img_path)
            continue
        
        if not os.path.exists(label_path):
            msg = f"Label file missing: {label_path}"
            logger.error(msg)
            skipped_files.append(label_path)
            continue
        
        ct_img = safe_load_nifti(img_path)
        if ct_img is None:
            msg = f"Failed to load CT file: {img_path}"
            logger.error(msg)
            skipped_files.append(img_path)
            continue
        
        try:
            original_spacing = ct_img.header.get_zooms()[:3] # read original voxel spacing (x, y, z) in mm
            ct_data = ct_img.get_fdata().astype(np.float32)
            affine = ct_img.affine
            
            logger.info(f"Processing {public_id} | Original spacing: "
                       f"{original_spacing[0]:.3f}×{original_spacing[1]:.3f}×{original_spacing[2]:.3f} mm")
        except Exception as e:
            msg = f"Error processing CT metadata: {img_path} | {str(e)}"
            logger.error(msg)
            skipped_files.append(img_path)
            continue
        
        # Load the label volume.
        label_img = safe_load_nifti(label_path)
        if label_img is None:
            msg = f"Failed to load label file: {label_path}"
            logger.error(msg)
            skipped_files.append(label_path)
            continue
        
        try:
            label_data = label_img.get_fdata()
        except Exception as e:
            msg = f"Error loading label data: {label_path} | {str(e)}"
            logger.error(msg)
            skipped_files.append(label_path)
            continue
        
        # Resample only when the scan is not already ~1 mm isotropic (0.01 mm tolerance).
        if not np.allclose(original_spacing, target_spacing, atol=0.01):
            logger.info(f"  Resampling CT to isotropic voxels (1mm * 1mm * 1mm) for {public_id}")
            ct_data, new_spacing = resample_to_isotropic(ct_data, original_spacing, target_spacing)
            
            # Cast labels to uint8 so resample_to_isotropic selects nearest-neighbour
            # interpolation (order 0) and label ids are not blended.
            logger.info(f"  Resampling labels to isotropic voxels for {public_id}")
            label_data, _ = resample_to_isotropic(label_data.astype(np.uint8), original_spacing, target_spacing)
            
            spacing_used = new_spacing
            logger.info(f"  Resampled shape: {ct_data.shape} with spacing {new_spacing}")
        else:
            logger.info(f"  Using original data - already isotropic (1mm * 1mm * 1mm)")
            spacing_used = original_spacing
        
        current_fracture_count = 0

        for label_id, label_code in zip(frac_info['label_ids'], frac_info['label_codes']):# process each fracture annotation
            fracture_mask = (label_data == label_id).astype(np.uint8)

            positions = np.argwhere(fracture_mask > 0)            # voxel coordinates of the fracture region
            if len(positions) == 0:
                # Possible if resampling erased a tiny fracture or ids mismatch.
                msg = f"No voxels found for fracture {label_id} in {public_id}"
                logger.warning(msg)
                continue

            center = np.mean(positions, axis=0).astype(int)# fracture centroid (in resampled voxel space)
            
            ct_crop = crop_around_center(ct_data, center, crop_size)
            mask_crop = crop_around_center(fracture_mask, center, crop_size)
            
            # Paths stored relative to output_dir so the metadata CSV is relocatable.
            base_name = f'{public_id}_{label_id}'
            ct_filename = os.path.join('images', f'{base_name}_ct.npy')
            mask_filename = os.path.join('labels', f'{base_name}_mask.npy')
            
            np.save(os.path.join(output_dir, ct_filename), ct_crop)
            np.save(os.path.join(output_dir, mask_filename), mask_crop)

            patch_info.append({
                'public_id': public_id,
                'label_id': label_id,
                'class_label': label_code,
                'center_x': center[0],
                'center_y': center[1],
                'center_z': center[2],
                'original_spacing_x': original_spacing[0],
                'original_spacing_y': original_spacing[1],
                'original_spacing_z': original_spacing[2],
                'resampled_spacing_x': spacing_used[0],
                'resampled_spacing_y': spacing_used[1],
                'resampled_spacing_z': spacing_used[2],
                'physical_size_mm': crop_size,  # 64 mm per side (voxels are 1 mm after resampling)
                'ct_path': ct_filename,
                'mask_path': mask_filename
            })
            
            current_fracture_count += 1
            logger.info(f"  Extracted fracture {label_id} at resampled coord {center}")

        logger.info(f"Processed {public_id}: Extracted {current_fracture_count} fractures")
    
    metadata_df = pd.DataFrame(patch_info)
    metadata_path = os.path.join(output_dir, 'metadata', 'patch_metadata.csv')
    metadata_df.to_csv(metadata_path, index=False)
    logger.info(f"Saved metadata to {metadata_path}")
    
    # Record skipped inputs so failed cases can be audited after the run.
    if skipped_files:
        skipped_file = os.path.join(output_dir, 'logs', 'skipped_files.txt')
        with open(skipped_file, 'w') as sf:
            sf.write("\n".join(skipped_files))
        logger.warning(f"{len(skipped_files)} files skipped, see: {skipped_file}")
    
    logger.info(f"Successfully generated {len(patch_info)} 3D patches with isotropic voxels")
    logger.info(f"All patches normalized to {crop_size}mm * mm * mm physical space")
    logger.info(f"Processing complete. Output directory: {output_dir}")

if __name__ == "__main__":
    main()