from ..builder import MODELS
from .base_classifier import BaseClassifier

import torch
import torch.nn as nn
import torch.nn.functional as F
from ..modules.pointnet2 import PointNetSetAbstraction, PointNetSetAbstractionMsg
from .lsa_net import PointLBR, LSA, PointMaxPooling, PointAttention, PointGlobalAvgPooling

class Permute(nn.Module):
    """Reorder tensor dimensions inside an ``nn.Sequential``.

    Accepts either a single tensor (``dims`` is one permutation) or a
    sequence of tensors (``dims`` is a matching sequence of permutations,
    applied element-wise via ``zip``).
    """

    def __init__(self, dims):
        super().__init__()
        self.dims = dims

    def forward(self, inputs):
        # Non-tensor input is treated as a sequence of tensors paired
        # one-to-one with a sequence of permutations.
        if not isinstance(inputs, torch.Tensor):
            return [tensor.permute(perm) for tensor, perm in zip(inputs, self.dims)]
        return inputs.permute(self.dims)

class ConcatXYZ(nn.Module):
    """Append xyz coordinates to the feature channels.

    Takes an ``(xyz, data)`` pair of ``[BS, N, C]`` tensors and returns
    ``(xyz, data')`` where ``data'`` is ``data`` with ``xyz`` concatenated
    along the last (channel) dimension.
    """

    def __init__(self):
        super().__init__()

    def forward(self, inputs):
        xyz, feats = inputs  # both [BS, N, C]
        merged = torch.cat((feats, xyz), dim=-1)
        return xyz, merged

@MODELS.register_module()
class PointNet_cls_dev(BaseClassifier):
    """Point-cloud classifier built from LSA attention/pooling stages.

    Three set-abstraction stages (``sa1``-``sa3``) progressively downsample
    the cloud and widen the channels (3 -> 128 -> 256 -> 1024); a global max
    pool in ``sa3`` collapses the points, and a 3-layer MLP head with
    BatchNorm + Dropout produces the class logits.

    Args:
        num_class: number of output classes for the final linear layer.
        use_msg: unused; kept only for backward compatibility with configs
            that still pass it.
    """

    def __init__(self, num_class, use_msg=True):
        super().__init__()

        # Classification head: 1024 -> 512 -> 256 -> num_class.
        self.fc1 = nn.Linear(1024, 512)
        self.bn1 = nn.BatchNorm1d(512)
        self.drop1 = nn.Dropout(0.4)
        self.fc2 = nn.Linear(512, 256)
        self.bn2 = nn.BatchNorm1d(256)
        self.drop2 = nn.Dropout(0.5)
        self.fc3 = nn.Linear(256, num_class)

        # Stage 1: lift raw xyz (3 channels) to 128 features with two
        # attention layers, then downsample points by 2x via local max pool.
        # The Permute wrappers convert [BS, C, N] <-> [BS, N, C] around the
        # point-wise modules — assumes the LSA modules operate channels-last;
        # TODO(review) confirm against lsa_net.
        self.sa1 = nn.Sequential(
            Permute([(0, 2, 1), (0, 2, 1)]),
            PointLBR(3, 8),
            PointAttention(qdim=8, kdim=8, vdim=8, qkdim=8, edim=8, nhead=2, local_size=9, relative=True, cat_xyz=False, dropout=0.0),
            PointLBR(8, 32),
            PointAttention(qdim=32, kdim=32, vdim=32, qkdim=32, edim=32, nhead=4, local_size=9, relative=False, cat_xyz=True, dropout=0.0),
            PointLBR(32, 128),
            PointMaxPooling(9, 2),
            Permute([(0, 2, 1), (0, 2, 1)]),
        )
        # Stage 2: one wider attention layer, 128 -> 256 channels, then
        # another 2x local max-pool downsampling.
        self.sa2 = nn.Sequential(
            Permute([(0, 2, 1), (0, 2, 1)]),
            PointAttention(qdim=128, kdim=128, vdim=128, qkdim=64, edim=128, nhead=4, local_size=16, relative=False, cat_xyz=True, dropout=0.0),
            PointLBR(128, 128),
            PointLBR(128, 256),
            PointMaxPooling(9, 2),
            Permute([(0, 2, 1), (0, 2, 1)]),
        )
        # Stage 3: re-attach xyz to the features (hence 256+3 input
        # channels), expand to 1024, and global-max-pool over all points.
        self.sa3 = nn.Sequential(
            Permute([(0, 2, 1), (0, 2, 1)]),
            ConcatXYZ(),
            PointLBR(256 + 3, 256),
            PointLBR(256, 512),
            PointLBR(512, 1024),
            PointMaxPooling(None, None, is_global=True),
            Permute([(0, 2, 1), (0, 2, 1)]),
        )

    def inference(self, points):
        """Return class logits for a batch of point clouds.

        ``points`` is fed as both the coordinate and feature stream to
        ``sa1`` — presumably shaped [B, 3, N] given the channels-first
        Permute at each stage boundary; TODO(review) confirm with callers.
        """
        B, _, _ = points.shape
        l1_xyz, l1_points = self.sa1((points, points))
        l2_xyz, l2_points = self.sa2((l1_xyz, l1_points))
        # l3_xyz is unused: the global pool leaves a single feature vector.
        l3_xyz, l3_points = self.sa3((l2_xyz, l2_points))
        x = l3_points.reshape(B, 1024)
        x = self.drop1(F.relu(self.bn1(self.fc1(x))))
        x = self.drop2(F.relu(self.bn2(self.fc2(x))))
        logits = self.fc3(x)
        return logits
