import torch
torch.set_num_threads(1)
import torch.nn as nn
from src.models.layer.embedding_layer import TabularEmbedding

class LSTMRegressor(nn.Module):
    """LSTM-based regressor over tabular (categorical + continuous) features.

    Each input column is embedded into a common ``embedding_dim`` space via
    ``TabularEmbedding`` (``fuse=False`` keeps categorical/continuous outputs
    separate, ``cont_embed=True`` embeds continuous columns too). The
    per-feature embeddings are concatenated along the feature axis and treated
    as a sequence of length ``num_cat_features + num_cont_features`` fed
    through a (bi)directional LSTM; the final layer's hidden state drives a
    small MLP head.

    Args:
        num_cat_features: number of categorical input columns.
        num_cont_features: number of continuous input columns.
        num_classes: output width of the head (1 for scalar regression).
        embedding_dim: per-feature embedding size (= LSTM input size).
        hidden_dim: LSTM hidden size, also the head's hidden width.
        dropout: dropout probability, applied between stacked LSTM layers and
            inside the regression head.
        num_layers: number of stacked LSTM layers.
        bidirectional: if True, run the LSTM in both directions and
            concatenate the final forward/backward hidden states.
    """

    def __init__(self, num_cat_features, num_cont_features, num_classes=1,
                 embedding_dim=16, hidden_dim=128, dropout=0.3, num_layers=2, bidirectional=True):
        super().__init__()

        self.embedding = TabularEmbedding(
            num_cat_features, num_cont_features,
            embedding_dim=embedding_dim,
            cont_hidden_dim=hidden_dim,
            fuse=False,
            cont_embed=True
        )

        self.input_dim = embedding_dim
        self.hidden_dim = hidden_dim
        self.num_layers = num_layers
        self.bidirectional = bidirectional

        self.lstm = nn.LSTM(
            input_size=self.input_dim,
            hidden_size=self.hidden_dim,
            num_layers=num_layers,
            batch_first=True,
            bidirectional=bidirectional,
            # FIX: `dropout` was previously only used in the head, so with the
            # default num_layers=2 no inter-layer LSTM dropout was ever
            # applied. Gate on num_layers because nn.LSTM warns when
            # dropout > 0 with a single layer (dropout acts *between* layers).
            dropout=dropout if num_layers > 1 else 0.0
        )

        lstm_output_dim = hidden_dim * (2 if bidirectional else 1)
        self.regressor = nn.Sequential(
            nn.Linear(lstm_output_dim, hidden_dim),
            nn.ReLU(inplace=True),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, num_classes)
        )

    def forward(self, batch):
        """Run the model on one batch.

        Args:
            batch: 3-tuple ``(x_cat, x_cont, _)``; the third element
                (presumably the target) is ignored here.

        Returns:
            Tensor of shape ``[batch]`` when ``num_classes == 1`` (the
            trailing size-1 dim is squeezed), else ``[batch, num_classes]``.
        """
        x_cat, x_cont, _ = batch
        x_cat_embedded, x_cont_embedded = self.embedding(x_cat, x_cont)

        # Reorder each embedding to [batch, num_features, embedding_dim] so
        # every feature becomes one sequence step. NOTE(review): assumes
        # TabularEmbedding returns [batch, embedding_dim, num_features] —
        # confirm against TabularEmbedding's implementation.
        x_cat_seq = x_cat_embedded.permute(0, 2, 1)
        x_cont_seq = x_cont_embedded.permute(0, 2, 1)

        # Concatenate categorical and continuous steps along the sequence
        # axis: [batch, seq_len, embedding_dim].
        x = torch.cat([x_cat_seq, x_cont_seq], dim=1)

        output, (hn, cn) = self.lstm(x)

        # hn is [num_layers * num_directions, batch, hidden_dim]; keep only
        # the last layer's final hidden state(s).
        if self.bidirectional:
            # Last layer's forward (hn[-2]) and backward (hn[-1]) states.
            out = torch.cat([hn[-2], hn[-1]], dim=1)  # [batch, hidden_dim * 2]
        else:
            out = hn[-1]  # [batch, hidden_dim]

        out = self.regressor(out)
        return out.squeeze(-1)
