#!/usr/bin/env python3
"""
Point Cloud to 3D Gaussian Splatting Conversion Script

This script converts point cloud data (Np, 6) where 6 denotes [x, y, z, r, g, b]
to 3D Gaussian representation for 3D Gaussian Splatting.

Each 3D Gaussian is characterized by:
- Position (mean): 3D coordinates (x, y, z)  
- Covariance matrix: 3x3 matrix defining shape and orientation
- Opacity: transparency value (alpha)
- Color: RGB values or Spherical Harmonics coefficients for view-dependent appearance

Usage:
    This module provides pcd2gs function for converting single frame point cloud data
    to 3D Gaussian representation within the unified data format.
"""
import argparse
import logging
import os
import warnings
from pathlib import Path
from typing import Optional, Tuple, Union

import numpy as np
import torch
import zarr
from scipy.spatial import cKDTree
from scipy.spatial.distance import cdist
from sklearn.neighbors import NearestNeighbors

from scalePcd import scalePcd

warnings.filterwarnings('ignore')

# Set up logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)


class PointCloudToGaussianConverter:
    """
    Converts point cloud data to 3D Gaussian representation.

    The conversion process involves:
    1. Loading point cloud data from single frame
    2. Estimating local geometry for each point
    3. Computing covariance matrices based on local neighborhood
    4. Initializing Gaussian parameters (position, covariance, color, opacity)
    5. Returning 3D Gaussian parameters
    """

    # DC spherical-harmonics basis constant Y_0^0 = 1 / (2 * sqrt(pi))
    SH_C0 = 0.28209479

    def __init__(self,
                 k_neighbors: int = 16,
                 initial_opacity: float = 0.8,
                 scale_factor: float = 1.0,
                 use_spherical_harmonics: bool = False,
                 sh_degree: int = 3):
        """
        Initialize the converter.

        Args:
            k_neighbors: Number of nearest neighbors for covariance estimation
            initial_opacity: Initial opacity value for all Gaussians
            scale_factor: Scale factor for covariance matrices
            use_spherical_harmonics: Whether to use SH for view-dependent colors
            sh_degree: Degree of spherical harmonics (0-3)
        """
        self.k_neighbors = k_neighbors
        self.initial_opacity = initial_opacity
        self.scale_factor = scale_factor
        self.use_spherical_harmonics = use_spherical_harmonics
        self.sh_degree = sh_degree

        # Number of SH coefficients per color channel: (l+1)^2 for degree l,
        # 0 when plain RGB is used.
        self.sh_coeffs = (sh_degree + 1) ** 2 if use_spherical_harmonics else 0

    def estimate_covariance_from_neighbors(self, points: np.ndarray) -> np.ndarray:
        """
        Estimate covariance matrices for each point based on local neighborhood.

        Each covariance is the sample covariance (ddof=1) of the point's k
        nearest neighbors (the point itself included), regularized with a
        small isotropic term and scaled by ``scale_factor``.

        Args:
            points: Point coordinates of shape (N, 3)

        Returns:
            Covariance matrices of shape (N, 3, 3)
        """
        logger.debug("Estimating covariance matrices from local neighborhoods")

        n_points = points.shape[0]
        k = min(self.k_neighbors, n_points)

        # With fewer than 2 neighbors no sample covariance exists; fall back
        # to a small isotropic Gaussian for every (isolated) point.
        if k < 2:
            return np.tile(np.eye(3) * self.scale_factor * 1e-3,
                           (n_points, 1, 1))

        # One batched KD-tree query instead of N individual KNN look-ups.
        tree = cKDTree(points)
        _, indices = tree.query(points, k=k)
        indices = indices.reshape(n_points, -1)  # guard k == 1 squeeze

        # Sample covariance of each neighborhood, centered on its own mean
        # (np.cov subtracts the sample mean internally; this matches it).
        neighbors = points[indices]                                # (N, k, 3)
        centered = neighbors - neighbors.mean(axis=1, keepdims=True)
        covariances = np.einsum('nki,nkj->nij', centered, centered) / (k - 1)

        # Regularize to keep matrices positive definite, then apply the
        # user-supplied scale (the regularization term is scaled too,
        # matching the original per-point order of operations).
        covariances += np.eye(3) * 1e-6
        covariances *= self.scale_factor

        return covariances

    def covariance_to_scaling_rotation(self, covariances: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """
        Decompose covariance matrices into scaling and rotation components.

        For 3D Gaussian Splatting, we represent covariance as:
        Σ = R * S * S^T * R^T
        where R is rotation matrix and S is diagonal scaling matrix.

        Args:
            covariances: Covariance matrices of shape (N, 3, 3)

        Returns:
            Tuple of (scaling, rotation_quaternions)
            - scaling: (N, 3) scaling factors
            - rotation_quaternions: (N, 4) quaternions [w, x, y, z]
        """
        logger.debug("Decomposing covariance matrices into scaling and rotation")

        n_points = covariances.shape[0]

        # Batched symmetric eigendecomposition (eigh accepts stacked input).
        eigenvals, eigenvecs = np.linalg.eigh(covariances)

        # Clamp eigenvalues so the scaling stays real and strictly positive.
        eigenvals = np.maximum(eigenvals, 1e-6)

        # Scaling factors are the square roots of the eigenvalues.
        scaling = np.sqrt(eigenvals)

        # Where det < 0 the eigenbasis is a reflection; flip one eigenvector
        # to obtain a proper rotation matrix (det = +1).
        flip = np.linalg.det(eigenvecs) < 0
        eigenvecs[flip, :, 0] *= -1

        # Quaternion conversion stays per-matrix (branchy Shepperd method).
        quaternions = np.zeros((n_points, 4))
        for i in range(n_points):
            quaternions[i] = self.rotation_matrix_to_quaternion(eigenvecs[i])

        return scaling, quaternions

    def rotation_matrix_to_quaternion(self, R: np.ndarray) -> np.ndarray:
        """
        Convert rotation matrix to quaternion [w, x, y, z].

        Uses Shepperd's method: branch on the largest of trace/diagonal
        entries so the divisor is always well away from zero.

        Args:
            R: 3x3 rotation matrix

        Returns:
            Quaternion as [w, x, y, z]
        """
        trace = np.trace(R)

        if trace > 0:
            s = np.sqrt(trace + 1.0) * 2  # s = 4 * qw
            qw = 0.25 * s
            qx = (R[2, 1] - R[1, 2]) / s
            qy = (R[0, 2] - R[2, 0]) / s
            qz = (R[1, 0] - R[0, 1]) / s
        elif R[0, 0] > R[1, 1] and R[0, 0] > R[2, 2]:
            s = np.sqrt(1.0 + R[0, 0] - R[1, 1] - R[2, 2]) * 2  # s = 4 * qx
            qw = (R[2, 1] - R[1, 2]) / s
            qx = 0.25 * s
            qy = (R[0, 1] + R[1, 0]) / s
            qz = (R[0, 2] + R[2, 0]) / s
        elif R[1, 1] > R[2, 2]:
            s = np.sqrt(1.0 + R[1, 1] - R[0, 0] - R[2, 2]) * 2  # s = 4 * qy
            qw = (R[0, 2] - R[2, 0]) / s
            qx = (R[0, 1] + R[1, 0]) / s
            qy = 0.25 * s
            qz = (R[1, 2] + R[2, 1]) / s
        else:
            s = np.sqrt(1.0 + R[2, 2] - R[0, 0] - R[1, 1]) * 2  # s = 4 * qz
            qw = (R[1, 0] - R[0, 1]) / s
            qx = (R[0, 2] + R[2, 0]) / s
            qy = (R[1, 2] + R[2, 1]) / s
            qz = 0.25 * s

        return np.array([qw, qx, qy, qz])

    def rgb_to_spherical_harmonics(self, colors: np.ndarray) -> np.ndarray:
        """
        Convert RGB colors to spherical harmonics coefficients.

        For simplicity, we initialize with DC component (l=0, m=0) set to RGB
        and higher order terms to zero.

        Args:
            colors: RGB colors of shape (N, 3)

        Returns:
            SH coefficients of shape (N, 3, sh_coeffs)
        """
        n_points = colors.shape[0]
        sh_coeffs = np.zeros((n_points, 3, self.sh_coeffs))

        # DC component (l=0, m=0): dividing by Y_0^0 makes the rendered
        # view-independent color equal the input RGB.
        sh_coeffs[:, :, 0] = colors / self.SH_C0

        # Higher order terms stay zero here; in practice they would be
        # optimized during training.

        return sh_coeffs

    def convert_single_frame(self, points: np.ndarray, colors: np.ndarray) -> dict:
        """
        Convert a single frame of point cloud to 3D Gaussians.

        Args:
            points: Point coordinates of shape (Np, 3)
            colors: RGB colors of shape (Np, 3)

        Returns:
            Dictionary with keys 'positions', 'scaling', 'rotation',
            'opacity', 'colors', 'use_sh', 'sh_degree'.
        """
        n_points = points.shape[0]
        logger.debug(f"Converting {n_points} points to 3D Gaussians")

        # Ensure colors are in the 0-255 range (delegated to scalePcd;
        # no further normalization is performed here).
        colors = scalePcd(colors, tag="pcd2gs_colors")

        # Estimate covariance matrices from local neighborhoods.
        covariances = self.estimate_covariance_from_neighbors(points)

        # Decompose covariance into scaling and rotation.
        scaling, quaternions = self.covariance_to_scaling_rotation(covariances)

        # All Gaussians start with the same opacity.
        opacity = np.full((n_points,), self.initial_opacity, dtype=np.float32)

        # Colors: either raw RGB or SH coefficients (DC term only).
        if self.use_spherical_harmonics:
            color_data = self.rgb_to_spherical_harmonics(colors)
        else:
            color_data = colors

        gaussian_params = {
            'positions': points.astype(np.float32),
            'scaling': scaling.astype(np.float32),
            'rotation': quaternions.astype(np.float32),
            'opacity': opacity,
            'colors': color_data.astype(np.float32),
            'use_sh': self.use_spherical_harmonics,
            'sh_degree': self.sh_degree if self.use_spherical_harmonics else 0
        }

        return gaussian_params


def pcd2gs(unifiedData: dict[str, Union[np.ndarray, dict]]) -> dict[str, Union[np.ndarray, dict]]:
    """
    Convert point cloud data to 3D Gaussian representation for single frame.

    Args:
        unifiedData: dict containing single frame data with keys:
                    ["gaussian":dict,"rgb":np.ndarray,"depth":np.ndarray,
                     "pointcloud":np.ndarray,"observer":np.ndarray,
                     "endpose":np.ndarray,"qpos":np.ndarray]
                    where pointcloud is shape (Np, 6) for single frame,
                    laid out as [x, y, z, r, g, b].

    Returns:
        The same dict with a 'gaussian' entry added, holding the Gaussian
        parameter dictionary produced by PointCloudToGaussianConverter.

    Raises:
        ValueError: If 'pointcloud' is not an (Np, 6) numpy array, or if
            every point sits exactly at the origin (no valid points).
    """
    point_cloud_data = unifiedData['pointcloud']  # Shape: (Np, 6)

    # Validate input shape - ensure it's a numpy array.
    if not isinstance(point_cloud_data, np.ndarray):
        raise ValueError(f"Expected pointcloud to be numpy array, got {type(point_cloud_data)}")

    if point_cloud_data.ndim != 2 or point_cloud_data.shape[1] != 6:
        raise ValueError(f"Expected point cloud shape (Np, 6), got {point_cloud_data.shape}")

    # Points whose xyz is exactly (0, 0, 0) are treated as padding/invalid.
    valid_mask = np.any(point_cloud_data[:, :3] != 0, axis=1)
    if not np.any(valid_mask):
        # The previous implementation called exit() here, which kills the
        # whole process from inside library code; raise instead so callers
        # can decide how to recover.
        logger.warning("No valid points in frame")
        raise ValueError("No valid points in frame: all xyz coordinates are zero")

    converter = PointCloudToGaussianConverter(k_neighbors=16,
                                              initial_opacity=0.8,
                                              scale_factor=1.5,
                                              use_spherical_harmonics=False,
                                              sh_degree=3)

    points = point_cloud_data[valid_mask, :3]   # xyz coordinates
    colors = point_cloud_data[valid_mask, 3:6]  # rgb values

    # Convert to 3D Gaussians and attach the result to the unified frame.
    unifiedData['gaussian'] = converter.convert_single_frame(points, colors)
    return unifiedData
