import os
import numpy as np
import open3d as o3d
from tqdm import tqdm
from multiprocessing import Pool

def process_ply_file(args):
    """Convert one ASCII PLY file into coord/color/normal/segment .npy files.

    Designed for ``multiprocessing.Pool.imap``: takes a single picklable
    tuple argument and never raises — failures are printed and reported via
    the return value so one bad file cannot kill a worker.

    Args:
        args: ``(ply_path, output_dir)`` tuple. ``ply_path`` is the input PLY
            file; ``output_dir`` receives coord.npy, color.npy, normal.npy
            and segment.npy.

    Returns:
        True on success, False on any error.
    """
    ply_path, output_dir = args  # unpack (imap passes a single argument)
    try:
        # Geometry attributes via Open3D.
        pcd = o3d.io.read_point_cloud(ply_path)
        points = np.asarray(pcd.points)  # coordinates [N, 3]
        colors = np.asarray(pcd.colors)  # RGB [N, 3], range 0-1

        # Estimate normals only when the file does not provide them.
        if not pcd.has_normals():
            pcd.estimate_normals(
                search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=0.1, max_nn=30)
            )
        normals = np.asarray(pcd.normals)

        # Labels are assumed to be the last column of each vertex row, which
        # Open3D does not expose, so parse the file directly. This only works
        # for ASCII PLY. Read the header in binary mode: the original
        # text-mode readlines() raised a confusing UnicodeDecodeError on
        # binary PLY payloads, and loaded the whole file just for the header.
        header_lines = []
        with open(ply_path, "rb") as f:
            while True:
                raw = f.readline()
                if not raw:
                    raise ValueError("no end_header found; not a valid PLY file")
                header_lines.append(raw.decode("ascii", errors="replace").strip())
                if header_lines[-1] == "end_header":
                    break
        if not any(line.startswith("format ascii") for line in header_lines):
            raise ValueError("binary PLY not supported for label extraction")
        num_points = next(
            int(line.split()[-1])
            for line in header_lines
            if line.startswith("element vertex")
        )
        # len(header_lines) includes end_header, i.e. the number of rows to skip.
        data = np.loadtxt(ply_path, skiprows=len(header_lines), max_rows=num_points)
        labels = data[:, -1].astype(np.int32)  # last column holds the label

        # Ensure C-contiguous layout before saving.
        points = np.ascontiguousarray(points)
        colors = np.ascontiguousarray((colors * 255).astype(np.uint8))  # to 0-255
        normals = np.ascontiguousarray(normals)
        labels = np.ascontiguousarray(labels)

        # Create the output directory.
        os.makedirs(output_dir, exist_ok=True)

        # Save the four arrays.
        np.save(os.path.join(output_dir, "coord.npy"), points.astype(np.float32))
        np.save(os.path.join(output_dir, "color.npy"), colors)
        np.save(os.path.join(output_dir, "normal.npy"), normals.astype(np.float32))
        np.save(os.path.join(output_dir, "segment.npy"), labels.astype(np.int32))

        return True
    except Exception as e:
        # Best-effort per-file handling: report and keep the pool running.
        print(f"Error processing {ply_path}: {str(e)}")
        return False

def batch_convert_ply_to_npy(ply_folder, output_root, num_workers=4):
    """Convert every .ply file under *ply_folder* (recursively) to .npy sets.

    Each input file gets its own output directory under *output_root*,
    mirroring its path relative to *ply_folder* (extension stripped).

    Fix: output directories are derived from the relative path rather than
    the bare basename, so same-named files in different subdirectories no
    longer overwrite each other's output. Files directly inside *ply_folder*
    keep the previous basename-derived location.

    Args:
        ply_folder: Root directory searched recursively for .ply files.
        output_root: Root directory that receives one subdirectory per file.
        num_workers: Number of worker processes for the conversion pool.
    """
    ply_files = []
    for root, _, files in os.walk(ply_folder):
        for fname in files:
            if fname.endswith(".ply"):
                ply_files.append(os.path.join(root, fname))

    print(f"Found {len(ply_files)} PLY files to process.")

    # Build picklable (input, output_dir) argument tuples, one per file.
    args_list = [
        (
            ply_path,
            os.path.join(
                output_root,
                os.path.splitext(os.path.relpath(ply_path, ply_folder))[0],
            ),
        )
        for ply_path in ply_files
    ]

    # imap yields results one by one, which lets tqdm show real progress.
    with Pool(processes=num_workers) as pool:
        results = list(tqdm(
            pool.imap(process_ply_file, args_list),
            total=len(ply_files),
            desc="Processing PLY files",
        ))

    # Summarize the True/False per-file results.
    success_count = sum(results)
    print(f"Successfully processed {success_count}/{len(ply_files)} files.")

if __name__ == "__main__":
    ply_folder = "202505/3dpoints"        # 替换为PLY文件所在目录
    output_root = "202505/output"         # 输出目录
    batch_convert_ply_to_npy(ply_folder, output_root, num_workers=4)