import torch
from torch import nn
from loguru import logger


class TransformerDDoSDetector(nn.Module):
    """Transformer-encoder based DDoS detection model.

    Pipeline: linear embedding -> stacked Transformer encoder ->
    learned temporal-attention pooling -> MLP head with a sigmoid output.
    """

    def __init__(self, input_dim: int, num_classes: int = 1):
        """
        Args:
            input_dim: number of features per time step.
            num_classes: width of the classifier output (default 1,
                i.e. a single sigmoid score per sample).
        """
        super().__init__()
        logger.debug("使用Transformer编码器+时间特征提取构建DDoS检测模型")

        # Project raw per-step features into the 128-d model space.
        self.embedding = nn.Sequential(
            nn.Linear(input_dim, 128),
            nn.LayerNorm(128),
            nn.GELU(),
            nn.Dropout(0.3),
        )

        # Four stacked self-attention encoder layers operating on
        # batch-first tensors.
        layer = nn.TransformerEncoderLayer(
            d_model=128,
            nhead=8,
            dim_feedforward=256,
            dropout=0.2,
            activation='gelu',
            batch_first=True,
        )
        self.transformer_encoder = nn.TransformerEncoder(layer, num_layers=4)

        # Scores each time step; Softmax over dim=1 (the sequence axis)
        # turns the scores into attention weights that sum to 1 per sample.
        self.time_attention = nn.Sequential(
            nn.Linear(128, 64),
            nn.Tanh(),
            nn.Linear(64, 1),
            nn.Softmax(dim=1),
        )

        # Final MLP mapping the pooled 128-d context to class scores.
        self.classifier = nn.Sequential(
            nn.Linear(128, 64),
            nn.ReLU(),
            nn.Dropout(0.1),
            nn.Linear(64, num_classes),
        )

    def forward(self, x):
        """Classify a batch of flow sequences.

        Args:
            x: tensor of shape (batch, seq_len, input_dim).

        Returns:
            Sigmoid-activated scores of shape (batch, num_classes).
        """
        hidden = self.transformer_encoder(self.embedding(x))

        # Attention-weighted sum over time: (batch, seq, 128) -> (batch, 128).
        weights = self.time_attention(hidden)
        pooled = (weights * hidden).sum(dim=1)

        return torch.sigmoid(self.classifier(pooled))
