# -*- coding: UTF-8 -*-
'''
@File: 1_RNN.py
@IDE: PyCharm
@Author: chaojie
@Date: 2025/11/6
@Introduce: RNN demo code — a manual simulation of an RNN cell (t1) and the built-in nn.RNN module (t2).
'''

import torch
import torch.nn as nn


def t1():
    """Simulate the forward pass of a vanilla RNN cell step by step.

    Builds a random embedded batch of shape [bs, t, e], then for each of the
    t time steps computes  h_i = act(x_i @ U + h_{i-1} @ W)  and stacks the
    per-step hidden states into an output tensor of shape [bs, t, E].

    Returns:
        torch.Tensor: stacked hidden states, shape [bs, t, E].
    """
    # 2 texts, 5 tokens per text, each token is a 4-dim feature vector
    bs, t, e = 2, 5, 4
    E = e  # hidden-state size (kept equal to the input size here)
    # emb_x simulates the result of tokenize -> token2id -> embedding lookup
    emb_x = torch.rand(bs, t, e)
    print(f"输入RNN的数据shape:{emb_x.shape}")

    u = nn.Parameter(torch.randn(e, E))  # input-to-hidden weights
    w = nn.Parameter(torch.randn(E, E))  # hidden-to-hidden (recurrent) weights
    act = nn.LeakyReLU()

    outputs = []
    state = torch.zeros(bs, E)  # initial hidden state h_0 is all zeros

    for i in range(t):
        # current-input feature extraction: [bs, e] @ [e, E] -> [bs, E]
        xzi = torch.matmul(emb_x[:, i, :], u)

        # previous-state feature extraction: [bs, E] @ [E, E] -> [bs, E]
        zzi = torch.matmul(state, w)

        # merge current input with the previous hidden state
        zi = act(xzi + zzi)

        # BUG FIX: the original never updated the state, so the recurrent
        # term was always zero — carry the hidden state to the next step.
        state = zi

        outputs.append(zi)

    output01 = torch.stack(outputs, dim=1)  # [bs, t, E]
    print(f"输出RNN的数据shape:{output01.shape}")
    return output01


def t2():
    """Demonstrate PyTorch's built-in nn.RNN on a random batch.

    Shapes produced (batch_first=True, bidirectional=True):
      out: [N, T, hidden_size * 2]  — the hidden state at every time step
      hx:  [num_layers * 2, N, hidden_size] — the final hidden state
    """
    layer = nn.RNN(input_size=4, hidden_size=8, batch_first=True, bidirectional=True)

    batch = torch.randn(2, 5, 4)  # [N=2, T=5, features=4]
    out, hx = layer(batch)

    print(out.shape)
    print(hx.shape)


if __name__ == '__main__':
    # t1() walks through the recurrence by hand; t2() uses the nn.RNN module.
    t2()
