import torch
# Pin PyTorch's intra-op parallelism to one thread.
# NOTE(review): presumably done to avoid CPU oversubscription when multiple
# training workers/processes run in parallel — confirm with the training setup.
torch.set_num_threads(1)
import torch.nn as nn
import torch.nn.functional as F
from src.models.layer.embedding_layer import TabularEmbedding

class AlexNetClassifier(nn.Module):
    """AlexNet-style classifier adapted to tabular data.

    Categorical inputs are embedded (via the project's ``TabularEmbedding``
    with ``fuse=False``) and pushed through a 1-D AlexNet-like convolutional
    stack; continuous inputs go through a single linear+ReLU projection. The
    flattened conv output and the continuous projection are concatenated and
    classified by a dropout-regularized MLP head.

    Args:
        num_cat_features: number of categorical feature columns.
        num_cont_features: number of continuous feature columns.
        num_classes: output logit count (default 2).
        embedding_dim: embedding size per categorical feature (default 8).
        hidden_dim: width of the continuous branch and the head (default 128).
        dropout: dropout probability in the classifier head (default 0.5).
    """

    def __init__(self, num_cat_features, num_cont_features, num_classes=2,
                 embedding_dim=8, hidden_dim=128, dropout=0.5):
        super().__init__()

        # Embedding layer for the raw inputs; fuse=False keeps the categorical
        # and continuous outputs separate so each branch can be processed alone.
        self.feature_extractor = TabularEmbedding(
            num_cat_features, num_cont_features, embedding_dim, hidden_dim, fuse=False
        )

        # AlexNet-style 1-D conv stack over the embedded categorical features.
        # NOTE(review): the first conv (kernel 11, stride 4, no padding) needs
        # an input length of at least 11, i.e. num_cat_features >= 11 — confirm
        # against the datasets this model is used on.
        self.cat_features = nn.Sequential(
            nn.Conv1d(embedding_dim, 96, kernel_size=11, stride=4, padding=0),
            nn.ReLU(inplace=True),
            nn.MaxPool1d(kernel_size=3, stride=2),
            nn.Conv1d(96, 256, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool1d(kernel_size=3, stride=2),
            nn.Conv1d(256, 256, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
        )

        # Continuous branch: one linear projection followed by ReLU.
        self.cont_features = nn.Sequential(
            nn.Linear(num_cont_features, hidden_dim),
            nn.ReLU(inplace=True),
        )

        # Probe the conv stack with a zero dummy to discover the flattened
        # feature size (the conv output length depends on num_cat_features).
        with torch.no_grad():
            probe = self.cat_features(
                torch.zeros(1, embedding_dim, num_cat_features)
            )
        conv_flat_dim = probe.numel()  # [1, 256, L] -> 256 * L

        # Classification head over the concatenated branch outputs.
        self.classifier = nn.Sequential(
            nn.Dropout(dropout),
            nn.Linear(conv_flat_dim + hidden_dim, hidden_dim),
            nn.ReLU(inplace=True),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, num_classes),
        )

    def forward(self, batch):
        """Compute class logits for a ``(x_cat, x_cont, *)`` batch tuple.

        Returns:
            Tensor of shape ``[batch, num_classes]`` with raw logits.
        """
        x_cat, x_cont, _ = batch

        # Embed both input groups; only the categorical embedding feeds the
        # conv branch (the second return value is unused here).
        cat_embedded, _ = self.feature_extractor(x_cat, x_cont)

        # Categorical branch: conv stack, then flatten per sample.
        conv_out = self.cat_features(cat_embedded)      # [batch, 256, L_out]
        conv_flat = conv_out.flatten(1)                 # [batch, 256*L_out]

        # Continuous branch.
        cont_out = self.cont_features(x_cont)           # [batch, hidden_dim]

        # Fuse both branches and classify.
        fused = torch.cat([conv_flat, cont_out], dim=1)
        return self.classifier(fused)