#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author: 邵奈一
@Email: shaonaiyi@163.com
@Date: 2024/11/18
@微信：shaonaiyi888
@微信公众号: 邵奈一 
"""
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author: 邵奈一
@Email: shaonaiyi@163.com
@Date: 2024/11/15
@微信：shaonaiyi888
@微信公众号: 邵奈一 
"""
# 代码5-4
# 文本生成类的构造函数
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F


class TextGenerator(nn.Module):
    """Bi-LSTM + LSTM + fully-connected text-generation model.

    Encodes a fixed-length window of token ids with a manually unrolled
    bidirectional LSTM (one LSTMCell per direction), feeds the concatenated
    per-step features through a second LSTMCell, and projects the final
    hidden state to vocabulary logits.

    NOTE(fix): the base class was changed from ``nn.ModuleList`` to
    ``nn.Module`` — ``ModuleList`` is only a submodule container and is not
    intended as a model base class.
    """

    def __init__(self, args, vocab_size):
        """
        Args:
            args: namespace with ``batch_size``, ``hidden_dim`` and
                ``window`` (the fixed sequence length) attributes.
            vocab_size: number of distinct tokens; used both as the
                embedding-table size and as the number of output classes.
        """
        super(TextGenerator, self).__init__()

        self.batch_size = args.batch_size
        self.hidden_dim = args.hidden_dim
        self.input_size = vocab_size    # embedding table rows
        self.num_classes = vocab_size   # logits over the whole vocabulary
        self.sequence_len = args.window

        # Dropout — declared but (as in the original) never applied in
        # forward(); kept so existing callers/checkpoints stay compatible.
        self.dropout = nn.Dropout(0.25)

        # Embedding layer: token id -> hidden_dim vector; id 0 is padding
        self.embedding = nn.Embedding(self.input_size, self.hidden_dim, padding_idx=0)

        # Bi-LSTM: one cell per direction
        self.lstm_cell_forward = nn.LSTMCell(self.hidden_dim, self.hidden_dim)
        self.lstm_cell_backward = nn.LSTMCell(self.hidden_dim, self.hidden_dim)

        # Second LSTM over the concatenated (forward, backward) features
        self.lstm_cell = nn.LSTMCell(self.hidden_dim * 2, self.hidden_dim * 2)

        # Final projection to class logits
        self.linear = nn.Linear(self.hidden_dim * 2, self.num_classes)

    # Bi-LSTM + LSTM + fully-connected layer
    def forward(self, x):
        """Run the model on a batch of token-id windows.

        Args:
            x: LongTensor of shape (batch, sequence_len) holding token ids.

        Returns:
            Tensor of shape (batch, num_classes): logits for the next token.
            NOTE: the recurrent states are randomly (re-)initialised on every
            call, so the output is stochastic unless the global RNG is seeded.
        """
        batch_size = x.size(0)
        # Fix: allocate the recurrent states on the input's device — the
        # original used bare torch.zeros, which always allocates on CPU and
        # therefore crashed for GPU inputs.
        device = x.device

        # Bi-LSTM states: [batch_size x hidden_dim]
        hs_forward = torch.zeros(batch_size, self.hidden_dim, device=device)
        cs_forward = torch.zeros(batch_size, self.hidden_dim, device=device)
        hs_backward = torch.zeros(batch_size, self.hidden_dim, device=device)
        cs_backward = torch.zeros(batch_size, self.hidden_dim, device=device)

        # Second-LSTM states: [batch_size x (hidden_dim * 2)]
        hs_lstm = torch.zeros(batch_size, self.hidden_dim * 2, device=device)
        cs_lstm = torch.zeros(batch_size, self.hidden_dim * 2, device=device)

        # Random (Kaiming) initialisation of the states on every call —
        # kept from the original design.
        torch.nn.init.kaiming_normal_(hs_forward)
        torch.nn.init.kaiming_normal_(cs_forward)
        torch.nn.init.kaiming_normal_(hs_backward)
        torch.nn.init.kaiming_normal_(cs_backward)
        torch.nn.init.kaiming_normal_(hs_lstm)
        torch.nn.init.kaiming_normal_(cs_lstm)

        # Token ids -> embeddings: (batch, seq, hidden_dim)
        out = self.embedding(x)

        # Fix: switching to time-major layout needs a transpose, not a view —
        # the original view(seq, batch, -1) reinterpreted memory and silently
        # interleaved the batch and time dimensions.
        out = out.transpose(0, 1)

        forward = []
        backward = []

        # Unroll the Bi-LSTM
        # Forward direction
        for i in range(self.sequence_len):
            hs_forward, cs_forward = self.lstm_cell_forward(out[i], (hs_forward, cs_forward))
            forward.append(hs_forward)
        # Backward direction
        for i in reversed(range(self.sequence_len)):
            hs_backward, cs_backward = self.lstm_cell_backward(out[i], (hs_backward, cs_backward))
            backward.append(hs_backward)

        # Fix: the backward list was collected in reverse time order; reverse
        # it so that step t pairs forward[t] with the backward state that
        # corresponds to time t (the original paired t with T-1-t).
        backward.reverse()

        # Second LSTM over the concatenated bidirectional features
        for fwd, bwd in zip(forward, backward):
            input_tensor = torch.cat((fwd, bwd), 1)
            hs_lstm, cs_lstm = self.lstm_cell(input_tensor, (hs_lstm, cs_lstm))

        # Last hidden state through the fully-connected layer
        out = self.linear(hs_lstm)

        return out
