import laspy
import numpy as np
import open3d as o3d
from sklearn.cluster import DBSCAN
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from scipy.spatial import KDTree
import matplotlib.pyplot as plt

def load_las_file(file_path):
    """
    Read a LAS file and return its point data.

    Returns a tuple (points, intensity, classification): points is an
    (N, 3) array of x/y/z coordinates, the other two are length-N arrays,
    zero-filled when the corresponding LAS field is absent.
    """
    print(f"正在加载LAS文件: {file_path}")
    las = laspy.read(file_path)

    # Stack the coordinate columns into an (N, 3) array.
    points = np.column_stack((las.x, las.y, las.z))

    # Intensity is optional in LAS files — fall back to zeros.
    if hasattr(las, 'intensity'):
        intensity = np.array(las.intensity)
    else:
        intensity = np.zeros(len(points))

    # Classification is likewise optional.
    if hasattr(las, 'classification'):
        classification = np.array(las.classification)
    else:
        classification = np.zeros(len(points))

    print(f"加载了 {len(points)} 个点")
    return points, intensity, classification

def estimate_local_terrain(points, search_radius=15.0, min_neighbors=50):
    """
    Estimate the local ground elevation around every point.

    Parameters
    ----------
    points : (N, 3) ndarray
        Point coordinates (x, y, z).
    search_radius : float
        XY radius of the neighbourhood used for the local estimate.
    min_neighbors : int
        Minimum neighbour count for a local estimate; below this the
        global fallback is used.

    Returns
    -------
    (N,) ndarray
        Estimated ground elevation per point.
    """
    print("估计局部地形...")
    # Build the KD-tree on XY only: "local" means nearby in plan view.
    tree = KDTree(points[:, :2])

    local_ground_levels = np.zeros(len(points))
    # Global fallback (lowest 10% of all heights), hoisted out of the loop.
    global_ground = np.percentile(points[:, 2], 10)

    for i in range(len(points)):
        neighbors_idx = tree.query_ball_point(points[i, :2], search_radius)

        if len(neighbors_idx) < min_neighbors:
            # Too few neighbours for a reliable local estimate.
            local_ground_levels[i] = global_ground
        else:
            # Lowest 5% of the neighbourhood heights approximates the
            # local ground.
            # BUG FIX: this assignment was dedented outside the else in the
            # original, clobbering the fallback and reading a possibly
            # unbound `neighbor_points`.
            neighbor_points = points[neighbors_idx]
            local_ground_levels[i] = np.percentile(neighbor_points[:, 2], 5)

    return local_ground_levels

def preprocess_point_cloud(points, intensity, voxel_size=0.5):
    """
    Pre-process the point cloud: optional intensity colouring, voxel
    down-sampling, and normal estimation.

    Parameters
    ----------
    points : (N, 3) ndarray
        Input coordinates.
    intensity : (N,) array-like
        Per-point intensity; used as a grey-scale colour when non-zero.
    voxel_size : float
        Voxel edge length for down-sampling (normal search radius is 2x this).

    Returns
    -------
    open3d.geometry.PointCloud
        Down-sampled cloud with normals estimated.
    """
    print("预处理点云...")
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(points)

    # If intensity is present, encode it as a grey-scale colour.
    if np.any(intensity):
        i_min = np.min(intensity)
        i_range = np.max(intensity) - i_min
        if i_range > 0:
            intensity_normalized = (intensity - i_min) / i_range
        else:
            # BUG FIX: constant non-zero intensity divided by zero in the
            # original; use a uniform mid-grey instead.
            intensity_normalized = np.full(len(intensity), 0.5)
        # Replicate the normalised value on R, G and B.
        colors = np.repeat(np.asarray(intensity_normalized)[:, None], 3, axis=1)
        pcd.colors = o3d.utility.Vector3dVector(colors)

    # Voxel down-sampling to reduce the point count.
    downpcd = pcd.voxel_down_sample(voxel_size=voxel_size)
    print(f"下采样后点数: {len(downpcd.points)}")

    # Estimate normals over a hybrid neighbourhood (radius + max-nn cap).
    downpcd.estimate_normals(search_param=o3d.geometry.KDTreeSearchParamHybrid(radius=voxel_size*2, max_nn=30))

    return downpcd

def extract_advanced_features(pcd, intensity, local_ground_levels, k=50):
    """
    Extract a 12-dimensional feature vector per point, including the
    height relative to the locally estimated ground level.

    Parameters
    ----------
    pcd : open3d.geometry.PointCloud
        Down-sampled cloud with normals already estimated.
    intensity : array-like
        Per-point intensity; used only when its length matches the cloud.
    local_ground_levels : (N,) ndarray
        Local ground elevation per point (see estimate_local_terrain).
    k : int
        Number of nearest neighbours used for local statistics.

    Returns
    -------
    (N, 12) ndarray of per-point features.
    """
    print("提取高级特征...")
    points = np.asarray(pcd.points)
    normals = np.asarray(pcd.normals)

    # KD-tree for k-nearest-neighbour queries.
    pcd_tree = o3d.geometry.KDTreeFlann(pcd)

    num_points = points.shape[0]
    features = np.zeros((num_points, 12))

    for i in range(num_points):
        # 1. Height above the local ground estimate (most discriminative).
        features[i, 0] = points[i, 2] - local_ground_levels[i]

        # 2. Point intensity (0 when lengths do not match, e.g. after
        # down-sampling).
        if len(intensity) == len(points):
            features[i, 1] = intensity[i]
        else:
            features[i, 1] = 0

        # 3. Z component of the normal (surface verticality).
        features[i, 2] = normals[i, 2]

        # k nearest neighbours of the current point.
        [k_found, idx, _] = pcd_tree.search_knn_vector_3d(pcd.points[i], k)

        neighbor_points = points[idx]
        neighbor_normals = normals[idx]

        # 4. Local height standard deviation.
        features[i, 3] = np.std(neighbor_points[:, 2])

        # 5. Local height range.
        features[i, 4] = np.max(neighbor_points[:, 2]) - np.min(neighbor_points[:, 2])

        # 6. Local density: neighbour count per area of the enclosing disc.
        if len(idx) > 1:
            distances = np.linalg.norm(neighbor_points - points[i], axis=1)
            features[i, 5] = len(idx) / (np.pi * (np.max(distances)) ** 2 + 1e-6)
        else:
            features[i, 5] = 0

        # 7. Variation of the neighbours' normal z components.
        features[i, 6] = np.std(neighbor_normals[:, 2])

        # 8. Horizontal spread: mean XY distance to the XY centroid.
        neighbor_xy = neighbor_points[:, :2]
        centroid_xy = np.mean(neighbor_xy, axis=0)
        features[i, 7] = np.mean(np.linalg.norm(neighbor_xy - centroid_xy, axis=1))

        # 9. Distance from the point to the local 3D centroid.
        centroid = np.mean(neighbor_points, axis=0)
        features[i, 8] = np.linalg.norm(points[i] - centroid)

        # 10. Curvature proxy from covariance eigenvalues (min/max ratio).
        centered = neighbor_points - centroid
        cov_matrix = centered.T @ centered / len(centered)
        eigenvalues = np.linalg.eigvalsh(cov_matrix)
        eigenvalues_sorted = np.sort(eigenvalues)[::-1]
        if eigenvalues_sorted[0] > 0:
            features[i, 9] = eigenvalues_sorted[2] / eigenvalues_sorted[0]

        # 11. Local slope: mean |dz|/|dxy| over horizontally separated
        # neighbours.
        # BUG FIX: the original read `slopes` outside the guard, so it could
        # use a stale value from a previous iteration or raise NameError on
        # the first point with no valid neighbours.
        xy_distances = np.linalg.norm(neighbor_xy - points[i, :2], axis=1)
        z_differences = neighbor_points[:, 2] - points[i, 2]
        valid_mask = xy_distances > 0
        if np.any(valid_mask):
            slopes = np.abs(z_differences[valid_mask] / xy_distances[valid_mask])
            features[i, 10] = np.mean(slopes)
        else:
            features[i, 10] = 0

        # 12. Roughness: std of residuals of a local plane fit z = ax+by+c.
        if len(idx) >= 5:
            A = np.column_stack([neighbor_xy, np.ones(len(neighbor_xy))])
            try:
                coeffs, _, _, _ = np.linalg.lstsq(A, neighbor_points[:, 2], rcond=None)
                predicted_z = A @ coeffs
                features[i, 11] = np.std(neighbor_points[:, 2] - predicted_z)
            except np.linalg.LinAlgError:
                # Degenerate neighbourhood: fall back to zero roughness.
                # (Narrowed from a bare except that hid real errors.)
                features[i, 11] = 0
        else:
            features[i, 11] = 0

    return features

def adaptive_segmentation(points, features, local_ground_levels):
    """
    Segment building and road points using terrain-relative features.

    Parameters
    ----------
    points : (N, 3) ndarray
        Point coordinates (used only for its length).
    features : (N, 12) ndarray
        Feature matrix from extract_advanced_features; columns used here:
        0 = height above local ground, 3 = local height std,
        11 = local roughness.
    local_ground_levels : (N,) ndarray
        Unused; kept for interface compatibility.

    Returns
    -------
    (building_indices, road_indices) : tuple of int ndarrays
    """
    print("进行自适应分割...")

    relative_heights = features[:, 0]

    # Buildings: clearly above the local ground (> 2 m), with some height
    # variation (not flat terrain) and a relatively smooth surface.
    # The original looped over candidate points in Python and re-checked a
    # redundant > 1.5 m threshold; these vectorized masks select the exact
    # same points in O(N) numpy time.
    refined_building_mask = (
        (relative_heights > 2.0) &
        (features[:, 3] > 0.05) &
        (features[:, 11] < 0.5)
    )

    # Roads: near the ground, small height variation, smooth surface.
    road_mask = (
        (relative_heights < 0.5) &
        (features[:, 3] < 0.15) &
        (features[:, 11] < 0.3)
    )

    building_indices = np.where(refined_building_mask)[0]
    road_indices = np.where(road_mask)[0]

    print(f"检测到 {len(building_indices)} 个建筑物点，{len(road_indices)} 个道路点")

    return building_indices, road_indices

def multi_scale_analysis(points, base_search_radius=15.0):
    """
    Multi-scale terrain analysis.

    For every point, estimate the local ground height (5th percentile of
    neighbour z values) at three XY search radii: 0.5x, 1x and 2x
    base_search_radius.

    Parameters
    ----------
    points : (N, 3) ndarray
        Point coordinates.
    base_search_radius : float
        Middle of the three analysis radii.

    Returns
    -------
    (N, 3) ndarray — one column per scale. When a neighbourhood contains
    10 or fewer points, the point's own height is used as the estimate.
    """
    print("进行多尺度地形分析...")
    tree = KDTree(points[:, :2])

    radii = [base_search_radius * 0.5, base_search_radius, base_search_radius * 2]
    multi_scale_features = np.zeros((len(points), len(radii)))

    for point_idx in range(len(points)):
        for scale, radius in enumerate(radii):
            # BUG FIX: in the original the if/else below was dedented out of
            # this radius loop, so only the LAST radius was ever evaluated
            # and a single value was broadcast into all three columns.
            neighbors_idx = tree.query_ball_point(points[point_idx, :2], radius)
            if len(neighbors_idx) > 10:
                # 5th percentile of neighbour heights ~ local ground level.
                neighbor_z = points[neighbors_idx, 2]
                multi_scale_features[point_idx, scale] = np.percentile(neighbor_z, 5)
            else:
                # Too few neighbours: fall back to the point's own height.
                multi_scale_features[point_idx, scale] = points[point_idx, 2]

    return multi_scale_features

def post_process_segmentation(points, building_indices, road_indices, min_building_size=20):
    """
    Clean up the segmentation result: cluster the building points and drop
    DBSCAN noise plus clusters smaller than min_building_size.

    Returns the filtered building indices and their cluster labels
    (both empty arrays when no building points were supplied).
    """
    print("后处理分割结果...")

    # Guard clause: nothing to clean up.
    if len(building_indices) == 0:
        return building_indices, np.array([])

    building_points = points[building_indices]

    # Strict DBSCAN: min_samples equals the minimum accepted cluster size.
    labels = DBSCAN(eps=2.5, min_samples=min_building_size).fit(building_points).labels_

    # Keep only points belonging to a sufficiently large cluster
    # (label -1 marks DBSCAN noise and is excluded up front).
    unique_labels, counts = np.unique(labels[labels != -1], return_counts=True)
    valid_building_mask = np.zeros(len(building_indices), dtype=bool)
    for label, count in zip(unique_labels, counts):
        if count >= min_building_size:
            valid_building_mask |= (labels == label)

    building_indices = building_indices[valid_building_mask]
    building_labels = labels[valid_building_mask]

    n_clusters = len(unique_labels)
    n_noise = np.sum(labels == -1)
    print(f"后处理后: {n_clusters} 个有效建筑物，{n_noise} 个噪声点被移除")

    return building_indices, building_labels

def cluster_buildings(points, building_indices, eps=3.0, min_samples=15):
    """
    Group building points into individual buildings with DBSCAN.

    Returns one cluster label per building point (-1 marks noise), or an
    empty array when there are no building points at all.
    """
    print("聚类建筑物...")

    # Guard clause: nothing to cluster.
    if len(building_indices) == 0:
        print("没有检测到建筑物点")
        return np.array([])

    labels = DBSCAN(eps=eps, min_samples=min_samples).fit(points[building_indices]).labels_

    # Count clusters; label -1 is DBSCAN's noise marker, not a cluster.
    unique = set(labels)
    n_clusters = len(unique) - (1 if -1 in unique else 0)
    n_noise = list(labels).count(-1)

    print(f"检测到 {n_clusters} 个建筑物，{n_noise} 个噪声点")

    return labels

def visualize_results(pcd, building_indices, road_indices, building_labels=None):
    """
    Colour the point cloud by class and display it in an Open3D window.

    Roads are dark grey; buildings are red, or one colour per cluster when
    building_labels is given (black for DBSCAN noise); all remaining points
    are light green.
    """
    print("可视化结果...")

    num_points = len(pcd.points)
    colors = np.zeros((num_points, 3))

    # Roads: dark grey.
    colors[road_indices] = [0.3, 0.3, 0.3]

    if building_labels is not None and len(building_indices) > 0:
        # A small palette of distinct colours, reused cyclically per cluster.
        palette = [
            [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 1, 0], [1, 0, 1], [0, 1, 1],
            [0.5, 0, 0], [0, 0.5, 0], [0, 0, 0.5], [0.5, 0.5, 0], [0.5, 0, 0.5]
        ]
        color_map = {}
        for j, label in enumerate(set(building_labels)):
            # DBSCAN noise (label -1) is drawn black.
            color_map[label] = [0, 0, 0] if label == -1 else palette[j % len(palette)]

        for j, idx in enumerate(building_indices):
            colors[idx] = color_map[building_labels[j]]
    elif len(building_indices) > 0:
        # No cluster information: paint every building point red.
        colors[building_indices] = [1, 0, 0]

    # Everything not classified as building or road: light green.
    classified = np.concatenate([building_indices, road_indices])
    other_indices = np.setdiff1d(np.arange(num_points), classified)
    colors[other_indices] = [0.7, 1, 0.7]

    pcd.colors = o3d.utility.Vector3dVector(colors)

    o3d.visualization.draw_geometries([pcd],
                                      window_name="改进的建筑和道路检测结果",
                                      width=1024,
                                      height=768,
                                      point_show_normal=False)

def main(las_file_path):
    """
    Run the full pipeline on one LAS file: load, preprocess, terrain
    estimation, feature extraction, segmentation, clustering,
    post-processing and visualisation.
    """
    # 1. Load the LAS file.
    points, intensity, classification = load_las_file(las_file_path)

    # 2. Down-sample the cloud and estimate normals.
    pcd = preprocess_point_cloud(points, intensity, voxel_size=0.5)
    downsampled_points = np.asarray(pcd.points)

    # 3. Multi-scale terrain analysis (currently disabled).
    # multi_scale_features = multi_scale_analysis(downsampled_points)

    # 4. Estimate the local ground level per point.
    local_ground_levels = estimate_local_terrain(downsampled_points)

    # 5. Extract per-point features.
    features = extract_advanced_features(pcd, intensity, local_ground_levels)

    # 6. Segment buildings and roads.
    building_indices, road_indices = adaptive_segmentation(
        downsampled_points, features, local_ground_levels)

    # 7. Cluster building points into individual buildings.
    building_labels = cluster_buildings(downsampled_points, building_indices)

    # 8. Drop noise and too-small building clusters.
    building_indices, building_labels = post_process_segmentation(
        downsampled_points, building_indices, road_indices
    )

    # 9. Visualise the result.
    visualize_results(pcd, building_indices, road_indices, building_labels)

    print("处理完成!")

if __name__ == "__main__":
    # Replace with the path to your LAS file
    las_file_path = "/mnt/d/temp_files/part_pointCloud.las"
    main(las_file_path)