import paddle.nn as nn
import paddle

class LSTM(paddle.nn.Layer):
    """A convolutional LSTM (ConvLSTM) cell.

    Processes a single timestep: each gate is a 2-D convolution over the
    channel-wise concatenation of the input and the previous hidden state,
    so spatial structure is preserved (unlike a fully-connected LSTM).

    Args:
        input_band (int): number of channels in the input tensor ``x``.
        hidden_band (int): number of channels in the hidden/cell state.
        kernel_size (int): side length of the (square) convolution kernel.
            Must be odd so that ``kernel_size // 2`` padding keeps the
            spatial dimensions unchanged. Defaults to 3.
    """

    def __init__(self, input_band, hidden_band, kernel_size=3):
        super().__init__()

        # All four gates share the same conv signature: they consume the
        # concatenated [x, h_prev] channels and emit hidden_band channels.
        # Padding of kernel_size // 2 gives "same" spatial output size.
        in_channels = input_band + hidden_band
        padding = kernel_size // 2

        # Kept as four separate layers (not one fused conv) so existing
        # state_dict keys remain: input_gate, forget_gate, cell_gate,
        # output_gate.
        self.input_gate = nn.Conv2D(in_channels, hidden_band, kernel_size, 1, padding)
        self.forget_gate = nn.Conv2D(in_channels, hidden_band, kernel_size, 1, padding)
        self.cell_gate = nn.Conv2D(in_channels, hidden_band, kernel_size, 1, padding)
        self.output_gate = nn.Conv2D(in_channels, hidden_band, kernel_size, 1, padding)

    def forward(self, x, h_prev, c_prev):
        """Run one ConvLSTM step.

        Args:
            x: input tensor of shape ``[N, input_band, H, W]``.
            h_prev: previous hidden state, ``[N, hidden_band, H, W]``.
            c_prev: previous cell state, ``[N, hidden_band, H, W]``.

        Returns:
            Tuple ``(h_new, c_new)``, both ``[N, hidden_band, H, W]``.
        """
        # Gates all read the same stacked [input, hidden] feature map.
        x_h = paddle.concat([x, h_prev], axis=1)

        input_gate = paddle.nn.functional.sigmoid(self.input_gate(x_h))
        forget_gate = paddle.nn.functional.sigmoid(self.forget_gate(x_h))
        cell_gate = paddle.tanh(self.cell_gate(x_h))
        output_gate = paddle.nn.functional.sigmoid(self.output_gate(x_h))

        # Standard LSTM state update: forget part of the old cell,
        # write in the gated candidate, then expose a gated tanh of it.
        c_new = forget_gate * c_prev + input_gate * cell_gate
        h_new = output_gate * paddle.tanh(c_new)

        return h_new, c_new
    

def main():
    """Demo: run one ConvLSTM step on a random batch and print the hidden state."""
    lstm = LSTM(40, 40)

    # Zero-initialized hidden and cell state: batch of 10, 40 channels, 9x9 spatial.
    h_prev = paddle.zeros([10, 40, 9, 9])
    c_prev = paddle.zeros([10, 40, 9, 9])

    # A single random input timestep with the same layout as the state.
    x = paddle.standard_normal(shape=[10, 40, 9, 9])

    # One forward step of the ConvLSTM cell.
    h_new, c_new = lstm(x, h_prev, c_prev)

    # Show the resulting hidden state.
    print(h_new)


if __name__ == "__main__":
    main()