import torch
torch.set_num_threads(1)
import torch.nn as nn

class TabularEmbedding(nn.Module):
    """
    Tabular data embedding module (categorical + continuous features).

    Categorical columns are embedded per-feature with ``nn.Embedding``.
    Continuous columns are either jointly projected with one ``nn.Linear``
    (default) or embedded per-feature with ``nn.Linear(1, embedding_dim)``
    when ``cont_embed=True``.
    """
    def __init__(self, num_cat_features, num_cont_features, embedding_dim=16, 
                cont_hidden_dim=128, fuse=False, cont_embed=False):
        """
        :param num_cat_features: int, number of categorical features
        :param num_cont_features: int, number of continuous features
        :param embedding_dim: int, per-feature embedding dimension
        :param cont_hidden_dim: int, output dim of the joint continuous projection
        :param fuse: bool, whether forward() also returns the fused (concatenated)
            categorical + continuous representation
        :param cont_embed: bool, embed each continuous feature individually
            instead of projecting all of them jointly
        """
        super().__init__()
        self.cont_embed = cont_embed
        self.fuse = fuse

        # Categorical feature embeddings.
        # NOTE(review): cardinality is hard-coded to 10 per feature — confirm
        # every categorical column has at most 10 distinct values.
        self.cat_embeddings = nn.ModuleList([
            nn.Embedding(10, embedding_dim)  # each table is (10, embedding_dim)
            for _ in range(num_cat_features)
        ])
        self.cat_output_dim = num_cat_features * embedding_dim

        # Projection mode: one joint linear layer over all continuous features.
        use_projection = num_cont_features > 0 and not cont_embed
        self.cont_projection = nn.Linear(num_cont_features, cont_hidden_dim) if use_projection else None
        # Per-feature continuous embeddings (used only when cont_embed=True).
        # NOTE(review): these are registered even when cont_embed=False, which
        # wastes parameters; kept as-is to preserve state_dict compatibility.
        self.cont_embeddings = nn.ModuleList([
            nn.Linear(1, embedding_dim) for _ in range(num_cont_features)
        ])
        self.cont_output_dim = cont_hidden_dim if use_projection else num_cont_features * embedding_dim

        # Total width of the fused representation.
        self.total_features = self.cat_output_dim + self.cont_output_dim

    def forward(self, x_cat, x_cont=None):
        """
        :param x_cat: LongTensor [batch, num_cat_features]; ignored when no
            categorical features were configured
        :param x_cont: FloatTensor [batch, num_cont_features] or None
        :return: if ``fuse``:
                     (fused [batch, total_features],
                      x_cat_embedded [batch, cat_output_dim] or None,
                      x_cont_embedded [batch, cont_output_dim] or None)
                 else:
                     (x_cat_embedded [batch, embedding_dim, num_cat_features] or None,
                      x_cont_embedded [batch, cont_hidden_dim] (projection mode) or
                          [batch, embedding_dim, num_cont_features] (cont_embed mode)
                          or None)
        """
        # Per-feature categorical embeddings, each (batch, embedding_dim).
        cat_embeds = [emb_layer(x_cat[:, i]) for i, emb_layer in enumerate(self.cat_embeddings)]
        # Per-feature continuous embeddings, each (batch, embedding_dim).
        cont_embeds = (
            [emb_layer(x_cont[:, i].unsqueeze(-1)) for i, emb_layer in enumerate(self.cont_embeddings)]
            if self.cont_embed and x_cont is not None else None
        )

        if self.fuse:
            # Fused mode: flatten categorical embeddings and concatenate with
            # the continuous representation.
            x_cat_embedded = torch.cat(cat_embeds, dim=1) if cat_embeds else None
            if self.cont_projection is not None:
                x_cont_embedded = self.cont_projection(x_cont)  # (batch, cont_hidden_dim)
            elif cont_embeds:
                # BUGFIX: fuse=True + cont_embed=True previously discarded the
                # continuous embeddings and crashed in torch.cat below.
                x_cont_embedded = torch.cat(cont_embeds, dim=1)  # (batch, num_cont * embedding_dim)
            else:
                x_cont_embedded = None
            # BUGFIX: drop absent components instead of passing None to torch.cat.
            parts = [t for t in (x_cat_embedded, x_cont_embedded) if t is not None]
            fused = torch.cat(parts, dim=1)
            return fused, x_cat_embedded, x_cont_embedded
        else:
            # Non-fused mode: keep the per-feature axis.
            # BUGFIX: check list truthiness — `is not None` was always true and
            # torch.stack([]) crashed when there were no categorical features.
            if cat_embeds:
                x_cat_embedded = torch.stack(cat_embeds, dim=0)   # (num_cat, batch, embedding_dim)
                x_cat_embedded = x_cat_embedded.permute(1, 2, 0)  # (batch, embedding_dim, num_cat)
            else:
                x_cat_embedded = None

            if self.cont_projection is not None:
                x_cont_embedded = self.cont_projection(x_cont)    # (batch, cont_hidden_dim)
            elif cont_embeds:
                x_cont_embedded = torch.stack(cont_embeds, dim=1)   # (batch, num_cont, embedding_dim)
                x_cont_embedded = x_cont_embedded.permute(0, 2, 1)  # (batch, embedding_dim, num_cont)
            else:
                x_cont_embedded = None

            return x_cat_embedded, x_cont_embedded
