| """
|
| 3DGS Codebook Builder
|
| =====================
|
| ไฝฟ็จ KMeans ๅฏน 3D Gaussian Splatting ๆจกๅ็ๅ็ฑป็นๅพๅๅซๆๅปบ codebook๏ผ
|
| - scale (3็ปด) โ 16384 ไธช็ฆปๆฃ็ดขๅผ
|
| - rotation (4็ปด) โ 16384 ไธช็ฆปๆฃ็ดขๅผ
|
| - DC (3็ปด) โ 4096 ไธช็ฆปๆฃ็ดขๅผ
|
| - SH rest (45็ปด) โ 4096 ไธช็ฆปๆฃ็ดขๅผ
|
|
|
| ๆฏไธช codebook ๅ็ฌไฟๅญไธบ .npz ๆไปถ๏ผๅ
ๅซ๏ผ
|
| - codebook : (K, D) float32 โโ ่็ฑปไธญๅฟ
|
| - indices : (N,) int32 โโ ๆฏไธช้ซๆฏ็นๅฏนๅบ็็ดขๅผ
|
| """
|
|
|
| import os
|
| import argparse
|
| import numpy as np
|
| from plyfile import PlyData
|
| from sklearn.cluster import MiniBatchKMeans
|
| import time
|
|
|
|
|
|
|
|
|
|
|
|
|
def read_ply(ply_path: str) -> dict:
    """Load a 3DGS .ply file and return its per-point attribute arrays.

    Returns a dict with keys:
        positions : (N, 3) xyz coordinates
        opacities : (N, 1) opacity values
        scales    : (N, 3) scale parameters
        rotations : (N, 4) rotation quaternion components
        dc        : (N, 3) DC color coefficients
        sh_rest   : (N, M) higher-order SH coefficients, or None if absent
        filter_3d : (N, 1) 'filter_3D' field, or None if absent
        plydata   : the raw PlyData object
    """
    ply = PlyData.read(ply_path)
    vtx = ply['vertex']
    field_names = vtx.data.dtype.names

    positions = np.stack([vtx['x'], vtx['y'], vtx['z']], axis=1)
    opacities = vtx['opacity'][:, np.newaxis]
    scales = np.stack([vtx[f'scale_{i}'] for i in range(3)], axis=1)
    rotations = np.stack([vtx[f'rot_{i}'] for i in range(4)], axis=1)
    dc = np.stack([vtx[f'f_dc_{i}'] for i in range(3)], axis=1)

    # Gather f_rest_* fields in numeric order (the suffix is the
    # coefficient index, so a plain lexicographic sort would be wrong).
    rest_keys = sorted(
        (k for k in field_names if k.startswith('f_rest_')),
        key=lambda s: int(s.split('_')[-1]),
    )
    sh_rest = (np.stack([vtx[k] for k in rest_keys], axis=1)
               if rest_keys else None)

    filter_3d = (vtx['filter_3D'][:, np.newaxis]
                 if 'filter_3D' in field_names else None)

    print(f"[read_ply] ่ฏปๅๅฎๆ๏ผ{positions.shape[0]} ไธช้ซๆฏ็น")
    if sh_rest is not None:
        print(f" SH rest ็ปดๅบฆ๏ผ{sh_rest.shape[1]} "
              f"๏ผๆๆ 45 = 15 ็่ฐ็ณปๆฐ ร 3 ้้๏ผ")

    return {
        'positions': positions,
        'opacities': opacities,
        'scales': scales,
        'rotations': rotations,
        'dc': dc,
        'sh_rest': sh_rest,
        'filter_3d': filter_3d,
        'plydata': ply,
    }
|
|
|
|
|
|
|
|
|
|
|
|
|
def build_codebook(
    features: np.ndarray,
    n_clusters: int,
    name: str,
    random_state: int = 42,
    batch_size: int = 65536,
    max_iter: int = 300,
) -> tuple[np.ndarray, np.ndarray]:
    """Cluster `features` (N, D) with MiniBatchKMeans.

    Returns:
        codebook : (K, D) float32 cluster centers
        indices  : (N,)  int32 cluster assignment for every point
    """
    n_points, n_dims = features.shape

    # Never request more clusters than there are samples.
    k = min(n_clusters, n_points)
    if k < n_clusters:
        print(f"[{name}] ่ญฆๅ๏ผ้ซๆฏ็นๆฐ ({n_points}) < ็ฎๆ cluster ๆฐ ({n_clusters})๏ผ"
              f"่ชๅจ่ฐๆดไธบ K={k}")

    print(f"[{name}] ๅผๅง KMeans๏ผN={n_points}, D={n_dims}, K={k} ...")
    start = time.time()

    model = MiniBatchKMeans(
        n_clusters=k,
        batch_size=min(batch_size, n_points),  # cap batch at the sample count
        max_iter=max_iter,
        random_state=random_state,
        n_init=3,
        verbose=0,
    )
    model.fit(features.astype(np.float32))

    codebook = model.cluster_centers_.astype(np.float32)
    indices = model.labels_.astype(np.int32)

    elapsed = time.time() - start
    print(f"[{name}] ๅฎๆ๏ผ่ๆถ {elapsed:.1f}s | inertia={model.inertia_:.4f}")
    print(f" codebook shape: {codebook.shape} | "
          f"็ดขๅผ่ๅด: [{indices.min()}, {indices.max()}]")

    return codebook, indices
|
|
|
|
|
|
|
|
|
|
|
|
|
def save_codebook(
    save_dir: str,
    name: str,
    codebook: np.ndarray,
    indices: np.ndarray,
) -> None:
    """Write the codebook and per-point indices to <save_dir>/<name>_codebook.npz."""
    os.makedirs(save_dir, exist_ok=True)
    target = os.path.join(save_dir, f"{name}_codebook.npz")
    np.savez_compressed(target, codebook=codebook, indices=indices)
    size_mb = os.path.getsize(target) / (1024 * 1024)
    print(f"[{name}] ๅทฒไฟๅญ โ {target} ({size_mb:.2f} MB)")
|
|
|
|
|
|
|
|
|
|
|
|
|
# Target number of KMeans clusters (codebook size K) per feature group.
# Dimensions per group come from read_ply: scale (3), rotation (4),
# dc (3), sh rest (45 when all f_rest_* fields are present).
CODEBOOK_CONFIG = {
    'scale': 16384,
    'rotation': 16384,
    'dc': 4096,
    'sh': 4096,
}
|
|
|
|
|
def build_all_codebooks(
    ply_path: str,
    save_dir: str,
    random_state: int = 42,
) -> dict:
    """Read the PLY, cluster every feature group, and save each codebook.

    Returns:
        A dict mapping each group name ('scale', 'rotation', 'dc', 'sh')
        to a (codebook_array, indices_array) tuple.

    Raises:
        ValueError: if the PLY file has no f_rest_* fields (no SH data).
    """
    data = read_ply(ply_path)

    if data['sh_rest'] is None:
        raise ValueError("PLY ๆไปถไธญๆชๆพๅฐ f_rest_* ๅญๆฎต๏ผๆ ๆณๆๅปบ SH codebookใ")

    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],
    }

    banner = '=' * 55
    results = {}
    for name, n_clusters in CODEBOOK_CONFIG.items():
        feats = feature_map[name]
        print(f"\n{banner}")
        print(f" ๆๅปบ [{name}] codebook | ็นๅพ็ปดๅบฆ: {feats.shape[1]}"
              f" | ็ฎๆ K: {n_clusters}")
        print(banner)

        codebook, indices = build_codebook(
            feats,
            n_clusters=n_clusters,
            name=name,
            random_state=random_state,
        )
        save_codebook(save_dir, name, codebook, indices)
        results[name] = (codebook, indices)

    print(f"\n{banner}")
    print(" ๆๆ codebook ๆๅปบๅฎๆฏ๏ผ")
    print(f" ่พๅบ็ฎๅฝ๏ผ{os.path.abspath(save_dir)}")
    print(banner)
    return results
|
|
|
|
|
|
|
|
|
|
|
|
|
def evaluate_codebooks(
    ply_path: str,
    save_dir: str,
) -> None:
    """Reload each saved codebook, reconstruct the features from the stored
    indices, and print the reconstruction RMSE and max absolute error.

    Groups whose .npz file is missing are skipped with a message, and so are
    groups whose source features are absent from the PLY (e.g. 'sh' when the
    file has no f_rest_* fields).
    """
    data = read_ply(ply_path)
    feature_map = {
        'scale': data['scales'],
        'rotation': data['rotations'],
        'dc': data['dc'],
        'sh': data['sh_rest'],
    }

    print("\n[่ฏไผฐ] ้ๅปบ่ฏฏๅทฎ๏ผRMSE๏ผ๏ผ")
    for name in CODEBOOK_CONFIG:
        path = os.path.join(save_dir, f"{name}_codebook.npz")
        if not os.path.exists(path):
            print(f" [{name}] ๆไปถไธๅญๅจ๏ผ่ทณ่ฟ")
            continue

        # BUGFIX: previously, when the PLY had no f_rest_* fields,
        # feature_map['sh'] was None and `.astype` below raised
        # AttributeError. Skip groups with no source features instead.
        if feature_map[name] is None:
            print(f" [{name}] feature missing in PLY, skipped")
            continue

        npz = np.load(path)
        codebook = npz['codebook']
        indices = npz['indices']

        original = feature_map[name].astype(np.float32)
        reconstructed = codebook[indices]

        diff = original - reconstructed
        rmse = np.sqrt(np.mean(diff ** 2))
        max_err = np.abs(diff).max()
        print(f" [{name:8s}] K={codebook.shape[0]:6d} D={codebook.shape[1]:3d}"
              f" RMSE={rmse:.6f} MaxErr={max_err:.6f}")
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    Returns:
        Namespace with attributes: ply_path, save_dir, seed, evaluate.
    """
    parser = argparse.ArgumentParser(
        description="ไธบ 3DGS .ply ๆไปถๆๅปบๅไธช KMeans codebook"
    )
    # BUGFIX: argparse ignores `default` on a plain positional argument, so
    # the original still *required* ply_path on the command line and the
    # default path was dead code. nargs='?' makes the positional optional
    # and activates the default.
    parser.add_argument('ply_path', type=str, nargs='?',
                        default="./merge/original_3dgs.ply",
                        help='่พๅฅ็ 3DGS .ply ๆไปถ่ทฏๅพ')
    parser.add_argument('--save_dir', type=str, default='./codebooks',
                        help='codebook ไฟๅญ็ฎๅฝ๏ผ้ป่ฎค๏ผ./codebooks๏ผ')
    parser.add_argument('--seed', type=int, default=42,
                        help='้ๆบ็งๅญ๏ผ้ป่ฎค๏ผ42๏ผ')
    parser.add_argument('--evaluate', action='store_true',
                        help='ๆๅปบๅฎๆๅ่ฎก็ฎ RMSE ้ๅปบ่ฏฏๅทฎ')
    return parser.parse_args()
|
|
|
|
|
def _main() -> None:
    """Script entry point: build all codebooks, then optionally evaluate them."""
    args = parse_args()

    build_all_codebooks(
        ply_path=args.ply_path,
        save_dir=args.save_dir,
        random_state=args.seed,
    )

    if args.evaluate:
        evaluate_codebooks(
            ply_path=args.ply_path,
            save_dir=args.save_dir,
        )


if __name__ == '__main__':
    _main()