import torch
torch.set_num_threads(1)
import torch.nn as nn
from tab_transformer_pytorch import FTTransformer, TabTransformer

class TransformerClassifier(nn.Module):
    """TabTransformer-based classifier for mixed categorical/continuous tabular data.

    Wraps ``tab_transformer_pytorch.TabTransformer`` and adds per-column
    range validation of the categorical inputs before each forward pass.

    Args:
        num_cat_features: number of categorical feature columns
            (not used directly here — the column count is implied by
            ``categories``; kept for interface compatibility).
        num_cont_features: number of continuous feature columns.
        num_classes: output dimension (2 for binary classification).
        embedding_dim: token embedding dimension (paper uses 32).
        hidden_dim: unused by this wrapper; kept for interface
            compatibility with sibling model classes.
        dropout: dropout rate applied to the feed-forward layers, and to
            attention unless ``attn_dropout`` is given in ``kwargs``.
        **kwargs: must contain ``categories`` (tuple of cardinalities, one
            per categorical column); may contain ``depth`` (default 6),
            ``heads`` (default 4), ``attn_dropout`` (default ``dropout``).

    Raises:
        ValueError: if ``categories`` is missing from ``kwargs``.
    """

    def __init__(self, num_cat_features, num_cont_features, num_classes=2,
                 embedding_dim=16, hidden_dim=128, dropout=0.5, **kwargs):
        super().__init__()
        if 'categories' not in kwargs:
            raise ValueError('categories is required')
        # Keep the per-column cardinalities so forward() can validate inputs.
        self.categories = kwargs['categories']
        self.transformer = TabTransformer(
            categories=self.categories,        # tuple: unique values per categorical column
            num_continuous=num_cont_features,  # number of continuous columns
            dim=embedding_dim,                 # embedding dimension, paper set at 32
            dim_out=num_classes,               # output logits dimension
            depth=kwargs.get('depth', 6),      # transformer depth, paper recommends 6
            heads=kwargs.get('heads', 4),      # attention heads, paper recommends 8
            attn_dropout=kwargs.get('attn_dropout', dropout),  # post-attention dropout
            ff_dropout=dropout,                # feed-forward dropout
        )

    def forward(self, batch):
        """Run the transformer on one batch.

        Args:
            batch: 3-tuple ``(x_cat, x_cont, _)`` where ``x_cat`` is an
                integer tensor of categorical indices (batch, n_cat) and
                ``x_cont`` a float tensor (batch, n_cont); the third
                element is ignored.

        Returns:
            Logits tensor of shape (batch, num_classes).

        Raises:
            ValueError: if any categorical column contains an index outside
                ``[0, cardinality - 1]`` — catches label-encoding mismatches
                early instead of failing inside the embedding lookup.
        """
        x_cat, x_cont, _ = batch

        # Validate every categorical column against its declared cardinality.
        for i, max_val in enumerate(self.categories):
            col = x_cat[:, i]
            if col.max() >= max_val or col.min() < 0:
                raise ValueError(
                    f"Category column {i} has out-of-bound values. "
                    f"Allowed range: [0, {max_val-1}], "
                    f"found min {col.min().item()}, max {col.max().item()}"
                )

        return self.transformer(x_cat, x_cont)
