# -*- coding:utf-8 -*-
"""
    Paper reference: Deep Knowledge Tracing (https://arxiv.org/abs/1506.05908)
    Module: forward pass only; emits the recurrent hidden state as a soft prompt
"""

import torch
import torch.nn as nn

class DKTForward(nn.Module):
    """DKT forward-pass module (Deep Knowledge Tracing, arXiv:1506.05908).

    Runs a recurrent encoder over one-hot interaction sequences and returns
    per-step sigmoid prediction scores plus the final recurrent state, which
    downstream code can use as a soft prompt.
    """

    # Supported recurrent cell types mapped to their torch classes.
    _CELLS = {"lstm": nn.LSTM, "rnn": nn.RNN, "gru": nn.GRU}

    def __init__(
        self,
        input_dim,
        hidden_dim,
        layer_num,
        output_dim,
        dropout=0.2,
        device="cpu",
        cell_type="lstm",
    ):
        """
        :param input_dim: int, size of each input feature vector.
        :param hidden_dim: int, size of the recurrent hidden state.
        :param layer_num: int, number of stacked recurrent layers.
        :param output_dim: int, size of the per-step output.
        :param dropout: float, inter-layer dropout probability, default 0.2.
            Only meaningful when layer_num > 1 (torch warns otherwise).
        :param device: str or torch.device, e.g. 'cpu' or 'cuda'.
        :param cell_type: str, one of 'lstm', 'rnn', 'gru' (case-insensitive).
        :raises ValueError: if cell_type is not supported.
        """
        super().__init__()
        self.input_dim = input_dim
        self.hidden_dim = hidden_dim
        self.layer_num = layer_num
        self.output_dim = output_dim
        self.dropout = dropout
        self.device = torch.device(device) if isinstance(device, str) else device
        self.cell_type = cell_type.lower()

        try:
            rnn_cls = self._CELLS[self.cell_type]
        except KeyError:
            raise ValueError("cell_type must be 'lstm', 'rnn', or 'gru'.") from None

        # One constructor call for all cell types; zero the dropout on a
        # single-layer model to avoid torch's single-layer-dropout warning.
        self.rnn = rnn_cls(
            self.input_dim,
            self.hidden_dim,
            self.layer_num,
            batch_first=True,
            dropout=self.dropout if layer_num > 1 else 0,
        ).to(self.device)

        self.fc = nn.Linear(self.hidden_dim, self.output_dim).to(self.device)

    def forward(self, x, state_in=None):
        """
        Run the recurrent encoder and the per-step prediction head.

        :param x: Tensor[float32], [batch_size, seq_len, input_dim],
            one-hot encoded input sequence.
        :param state_in: optional initial recurrent state; for 'lstm' a
            (h0, c0) tuple, otherwise a single h0 tensor. Defaults to zeros.
        :return: (preds, hidden_state) where preds is
            [batch_size, seq_len, output_dim] sigmoid probabilities and
            hidden_state is the RNN's final state (tuple for 'lstm').
        """
        x = x.to(self.device)

        if state_in is None:
            # Build the zero initial state lazily — only when the caller
            # did not supply one (the original allocated it every call).
            h0 = torch.zeros(
                self.layer_num, x.size(0), self.hidden_dim, device=self.device
            )
            state_in = (h0, torch.zeros_like(h0)) if self.cell_type == "lstm" else h0

        # output: per-step hidden states; hidden_state: final recurrent state.
        output, hidden_state = self.rnn(x, state_in)
        # Sigmoid maps the linear scores to per-skill correctness probabilities.
        preds = torch.sigmoid(self.fc(output))

        return preds, hidden_state
