import numpy as np

from .module import Module
from autograd import Tensor
from autograd.nn.parameter import Parameter
from .. import functional as F

class RNNCellBase(Module):
    """Shared parameter container for recurrent cells.

    Allocates the input-to-hidden and hidden-to-hidden weight matrices —
    ``num_chunks`` gate blocks stacked along the first axis — plus one
    stacked bias vector of length ``num_chunks * hidden_size``.

    Args:
        input_size: Number of features in the input ``x``.
        hidden_size: Number of features in the hidden state ``h``.
        bias: If ``False``, no bias parameter is allocated (``self.bias``
            is ``None``).
        num_chunks: Number of stacked gate blocks (1 for a plain RNN cell,
            4 for an LSTM cell).
    """

    def __init__(self, input_size: int, hidden_size: int, bias: bool = True,
                 num_chunks: int = 1) -> None:
        # `bias` was previously missing from this signature, so subclasses
        # passing it positionally bound it to `num_chunks` and then crashed
        # with "got multiple values for argument 'num_chunks'".
        super(RNNCellBase, self).__init__()
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.weight_ih = Parameter(Tensor(num_chunks * hidden_size, input_size))
        self.weight_hh = Parameter(Tensor(num_chunks * hidden_size, hidden_size))
        # NOTE(review): when bias is disabled, the functional cell
        # implementations are assumed to accept None here — TODO confirm.
        self.bias = Parameter(Tensor(num_chunks * hidden_size)) if bias else None


class RNNCell(RNNCellBase):
    """An Elman RNN cell: ``h' = nonlinearity(W_ih x + b + W_hh h)``.

    Args:
        input_size: Number of features in the input ``x``.
        hidden_size: Number of features in the hidden state ``h``.
        bias: Accepted for API parity; not forwarded to the base class,
            which allocates its bias parameter with its own default.
        nonlinearity: Activation to apply; only ``"tanh"`` is supported.
    """

    def __init__(self, input_size: int, hidden_size: int, bias: bool = True,
                 nonlinearity: str = "tanh") -> None:
        # Do not forward `bias` positionally: RNNCellBase.__init__ takes
        # (input_size, hidden_size, num_chunks), so passing `bias` there
        # bound it to `num_chunks` and raised a TypeError on construction.
        super(RNNCell, self).__init__(input_size, hidden_size, num_chunks=1)
        self.nonlinearity = nonlinearity

    def forward(self, input, hx):
        """Run one step; returns the next hidden state.

        Raises:
            RuntimeError: If ``self.nonlinearity`` is not ``"tanh"``.
        """
        # NOTE(review): check_forward_hidden is not defined in this file;
        # assumed to be inherited from Module — TODO confirm.
        self.check_forward_hidden(input, hx, '')
        if self.nonlinearity == "tanh":
            return F.rnn_tanh_cell(
                input, hx,
                self.weight_ih, self.weight_hh,
                self.bias,
            )
        # Previously any other nonlinearity fell through and `ret` was
        # unbound (UnboundLocalError); fail with an explicit error instead.
        raise RuntimeError("Unknown nonlinearity: {}".format(self.nonlinearity))

    
class LSTMCell(RNNCellBase):
    """A long short-term memory (LSTM) cell with 4 stacked gate chunks.

    Args:
        input_size: Number of features in the input ``x``.
        hidden_size: Number of features in the hidden state ``h``.
        bias: Accepted for API parity; not forwarded to the base class,
            which allocates its bias parameter with its own default.
    """

    def __init__(self, input_size: int, hidden_size: int, bias: bool = True) -> None:
        # Do not forward `bias` positionally: RNNCellBase.__init__ takes
        # (input_size, hidden_size, num_chunks), so passing `bias` there
        # bound it to `num_chunks` and raised a TypeError on construction.
        super(LSTMCell, self).__init__(input_size, hidden_size, num_chunks=4)

    def forward(self, input, hx, c):
        """Run one step given input, previous hidden state ``hx``, and
        previous cell state ``c``; delegates to the functional LSTM cell."""
        return F.lstm_cell(input, hx, c, self.weight_ih, self.weight_hh, self.bias)

