import torch
# Seed torch's global RNG so the randperm-based per-batch sampling orders
# produced by BatchPointEncoderDecoder are reproducible across runs.
torch.manual_seed(0)

class BatchPointEncoderDecoder:
    """Pack (batch id, per-batch random sampling order, global offset) into one int64.

    Bit layout, most- to least-significant:
        [batch: 16 bits][sampling order: 21 bits][global offset: 26 bits]
    Codes are one-to-one with input points: the global offset field stores the
    point's original position in the input tensor, so decoding recovers it exactly.
    """

    def __init__(self, max_points_per_batch=204800):
        """
        Initialize the encoder/decoder (codes map one-to-one onto input points).

        Parameters:
        max_points_per_batch: maximum number of points allowed in a single batch
            (must fit in the sampling-order bit field).
        """
        self.max_points_per_batch = max_points_per_batch
        self.batch_bits = 16       # supports up to 2^16 = 65536 batches
        self.index_bits = 21       # 2^21 = 2097152 > 204800 (max points per batch)
        self.offset_bits = 26      # 2^26 = 67108864 (~67M total input points)
        # Internal invariants of the bit layout (programming errors, not user input).
        assert self.batch_bits + self.index_bits + self.offset_bits <= 63, \
            "总位数超过63位（int64限制）"
        # BUG FIX: the configured per-batch capacity must actually fit in the
        # index field; the original never checked this.
        if max_points_per_batch > (1 << self.index_bits):
            raise ValueError("max_points_per_batch exceeds the index field capacity")

        # Precomputed masks and shift amounts for each bit field.
        self.batch_mask = (1 << self.batch_bits) - 1
        self.index_mask = (1 << self.index_bits) - 1
        self.offset_mask = (1 << self.offset_bits) - 1
        self.batch_shift = self.index_bits + self.offset_bits
        self.index_shift = self.offset_bits

    def _get_batch_sampling_order(self, points, batch_indices):
        """
        Assign each point a random sampling rank within its own batch, keeping a
        one-to-one mapping between input order and output codes.

        Parameters:
        points: input point cloud [N, 3]
        batch_indices: batch id per point [N]

        Returns:
        sampling_order: random within-batch rank per point [N]
        """
        device = points.device
        N = len(batch_indices)
        sampling_order = torch.zeros(N, dtype=torch.long, device=device)

        # Process one batch at a time.
        for batch_id in torch.unique(batch_indices):
            # Global indices of the points belonging to this batch.
            member_idx = (batch_indices == batch_id).nonzero(as_tuple=True)[0]
            batch_size = len(member_idx)
            if batch_size == 0:
                continue

            # Random permutation of the batch members.
            order = torch.randperm(batch_size, device=device)

            # PERF: vectorized scatter replaces the original O(batch_size)
            # Python loop — point member_idx[order[k]] receives rank k,
            # exactly as before.
            sampling_order[member_idx[order]] = torch.arange(
                batch_size, dtype=torch.long, device=device
            )

        return sampling_order

    def encode(self, points, batch_indices):
        """
        Encoder: produce one int64 code per input point.

        Parameters:
        points: input point cloud [N, 3]
        batch_indices: batch id per point [N]; must have the same length as points

        Returns:
        codes: int64 codes [N], one per input point

        Raises:
        ValueError: if points and batch_indices differ in length.
        """
        N = len(points)
        # BUG FIX: raise instead of assert — asserts vanish under `python -O`.
        if len(batch_indices) != N:
            raise ValueError("输入点云与batch_indices长度必须一致")

        # 1. Random within-batch sampling rank for every point.
        sampling_order = self._get_batch_sampling_order(points, batch_indices)

        # 2. Pack: batch << batch_shift | sampling_order << index_shift | global_offset
        global_offsets = torch.arange(N, dtype=torch.long, device=points.device)  # 0..N-1
        codes = (
            (batch_indices.to(torch.int64) & self.batch_mask) << self.batch_shift |
            (sampling_order.to(torch.int64) & self.index_mask) << self.index_shift |
            (global_offsets & self.offset_mask)
        )
        return codes

    def decode(self, codes, grid_coord=None, if_test=False):
        """
        Decoder: recover batch id, sampling rank and original index from codes,
        optionally rebuilding coordinates from grid_coord.

        Parameters:
        codes: int64 codes [N]
        grid_coord: grid coordinates [M, 3] indexed by the decoded original
            offsets (required when if_test is False)
        if_test: when True return the full 4-tuple (xyz, batch, order, offsets);
            when False (default) return the per-axis view (x, y, z, batch)

        Returns:
        if if_test: (xyz [N,3] or None, batch_values [N], sampling_order [N], global_offsets [N])
        else:       (x [N], y [N], z [N], batch_values [N])

        Raises:
        ValueError: if grid_coord is too short for the decoded offsets, or is
            omitted while if_test is False.
        """
        batch_values = (codes >> self.batch_shift) & self.batch_mask
        sampling_order = (codes >> self.index_shift) & self.index_mask
        global_offsets = codes & self.offset_mask

        # Rebuild coordinates by gathering grid_coord at the original offsets.
        xyz = None
        if grid_coord is not None:
            if codes.numel() > 0 and grid_coord.shape[0] < int(global_offsets.max()) + 1:
                raise ValueError("grid_coord长度不足")
            xyz = grid_coord[global_offsets]

        if not if_test:
            # BUG FIX: the original indexed xyz[:, 0] unconditionally here and
            # crashed with TypeError when grid_coord was None; fail loudly with
            # a clear message instead.  (The original comment also had the
            # condition backwards — this is the NON-test path.)
            if xyz is None:
                raise ValueError("grid_coord is required when if_test is False")
            return xyz[:, 0], xyz[:, 1], xyz[:, 2], batch_values

        return xyz, batch_values, sampling_order, global_offsets

# Module-level default codec instance shared by the encode()/decode() wrappers below.
rand = BatchPointEncoderDecoder(max_points_per_batch=204800)
def encode(points, batch_indices):
    """Encode a point cloud with the shared module-level codec.

    Parameters:
    points: input point cloud [N, 3]
    batch_indices: batch id per point [N]; must match the length of points

    Returns:
    codes: int64 codes [N], one per input point
    """
    codes = rand.encode(points, batch_indices)
    return codes
def decode(codes, grid_coord=None):
    """Decode codes with the shared module-level codec.

    Recovers batch ids (and, given grid_coord, the rebuilt coordinates)
    from codes produced by encode().

    Parameters:
    codes: int64 codes [N]
    grid_coord: grid coordinates [N, 3] used to rebuild point coordinates (optional)
    """
    decoded = rand.decode(codes, grid_coord)
    return decoded

if __name__ == "__main__":
    # Demo: 2 batches of 3 points each.
    # BUG FIX: the original hard-coded device='cuda' and crashed on CPU-only
    # hosts — fall back to CPU when CUDA is unavailable.
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    points = torch.tensor([
        [1.0, 2.0, 3.0],   # batch=0, input index 0
        [4.0, 5.0, 6.0],   # batch=0, input index 1
        [7.0, 8.0, 9.0],   # batch=0, input index 2
        [10.0, 11.0, 12.0], # batch=1, input index 3
        [13.0, 14.0, 15.0], # batch=1, input index 4
        [16.0, 17.0, 18.0], # batch=1, input index 5
    ], device=device)
    batch_indices = torch.tensor([0, 0, 0, 1, 1, 1], device=device)

    # Pretend grid_coord is the original grid coordinates of the point cloud
    # (in real use it may be a different tensor).
    grid_coord = points.clone()

    # Build the codec.
    encoder_decoder = BatchPointEncoderDecoder(max_points_per_batch=204800)

    # Encode.
    codes = encoder_decoder.encode(points, batch_indices)
    print("编码结果:")
    print(codes)
    # Decode (with grid_coord): full 4-tuple, then the per-axis view.
    xyz, b, order, offsets = encoder_decoder.decode(codes, grid_coord, if_test=True)
    x, y, z, b = encoder_decoder.decode(codes, grid_coord)
    print(x, y, z, b)
    print("解码后的坐标 (x,y,z):")
    print(xyz)
    print("解码后的batch编号 (b):")
    print(b, order, offsets)
