# -*- coding: UTF-8 -*-
'''
@File: model_cls.py
@IDE: PyCharm
@Author: chaojie
@Date: 2025/7/4
@Introduce: Simple RNN- and LSTM-based text classification models.
'''

import torch.nn as nn
import torch

class text_cls_model_rnn(nn.Module):
    """Text classifier: embedding -> multi-layer vanilla RNN -> mean pooling -> linear head.

    Args:
        emb_size: dimensionality of the token embeddings.
        vocab_size: number of entries in the embedding table.
        num_class: number of output classes (size of the logit vector).
        hidden_size: RNN hidden state size.
        num_layers: number of stacked RNN layers.
    """

    def __init__(self, emb_size, vocab_size, num_class, hidden_size=128, num_layers=5):
        super().__init__()
        # Token-id -> dense vector lookup.
        self.emb = nn.Embedding(vocab_size, embedding_dim=emb_size)
        # Recurrent encoder over the embedded sequence; expects batch-first input.
        self.rnn = nn.RNN(
            input_size=emb_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=True,
        )
        # Projects the pooled hidden state onto class logits.
        self.cls = nn.Linear(hidden_size, num_class)

    def forward(self, x):
        """Map token ids [batch, seq_len] to class logits [batch, num_class]."""
        embedded = self.emb(x)                 # [batch, seq_len, emb_size]
        seq_out, _hidden = self.rnn(embedded)  # [batch, seq_len, hidden_size]
        # Mean-pool over the time dimension to get one vector per sequence.
        pooled = torch.mean(seq_out, dim=1)    # [batch, hidden_size]
        return self.cls(pooled)


class text_cls_model_lstm(nn.Module):
    """Text classifier: embedding -> multi-layer LSTM -> mean pooling -> linear head.

    Args:
        emb_size: dimensionality of the token embeddings.
        vocab_size: number of entries in the embedding table.
        num_class: number of output classes (size of the logit vector).
        hidden_size: LSTM hidden state size.
        num_layers: number of stacked LSTM layers.
    """

    def __init__(self, emb_size, vocab_size, num_class, hidden_size=128, num_layers=5):
        super().__init__()
        # Token-id -> dense vector lookup.
        self.emb = nn.Embedding(vocab_size, embedding_dim=emb_size)
        # LSTM encoder over the embedded sequence; expects batch-first input.
        self.rnn = nn.LSTM(
            input_size=emb_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=True,
        )
        # Projects the pooled hidden state onto class logits.
        self.cls = nn.Linear(hidden_size, num_class)

    def forward(self, x):
        """Map token ids [batch, seq_len] to class logits [batch, num_class]."""
        embedded = self.emb(x)                 # [batch, seq_len, emb_size]
        seq_out, _state = self.rnn(embedded)   # [batch, seq_len, hidden_size]
        # Mean-pool over the time dimension to get one vector per sequence.
        pooled = torch.mean(seq_out, dim=1)    # [batch, hidden_size]
        return self.cls(pooled)

def t1():
    """Smoke-test the RNN classifier: forward a random batch and print the logit shape."""
    rnn_model = text_cls_model_rnn(128, 1000, 10)
    # Random token ids: batch of 8 sequences, each of length 10.
    batch = torch.randint(0, 1000, (8, 10))
    print(rnn_model(batch).shape)

def t2():
    """Smoke-test the LSTM classifier: print the model, then the logit shape for a random batch."""
    lstm_model = text_cls_model_lstm(128, 1000, 10)
    print( lstm_model)
    # Random token ids: batch of 8 sequences, each of length 10.
    batch = torch.randint(0, 1000, (8, 10))
    print(lstm_model(batch).shape)

if __name__ == "__main__":
    # Run the LSTM smoke test when executed as a script.
    t2()