from typing import Optional

import torch

from zkl_llmpt_nbptt.deque_memory import DequeMemory
from zkl_llmpt_nbptt.nbptt_attention_memory import NbpttAttentionMemory

class DequeNbpttAttentionMemory(torch.nn.Module, NbpttAttentionMemory):
    """NBPTT attention memory backed by a bounded deque.

    A thin adapter: every operation delegates to a ``DequeMemory``
    configured with ``dim=-2`` (presumably the sequence axis — see
    ``DequeMemory``) and a fixed ``capacity``.
    """

    def __init__(self, capacity: int):
        """Build the backing deque holding at most ``capacity`` entries."""
        super().__init__()
        self.deque = DequeMemory(dim=-2, capacity=capacity)

    def append(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None):
        """Push ``x`` (and its optional ``mask``) into the backing deque."""
        self.deque.append(x, mask)

    def read(self) -> tuple[torch.Tensor, torch.Tensor] | None:
        """Return whatever the backing deque yields (a tensor pair, or None)."""
        return self.deque.read()

    def reset(self, mask: Optional[torch.Tensor] = None):
        """Reset the deque for the entries selected by ``mask``.

        NOTE(review): with ``mask=None`` this is deliberately a no-op —
        confirm callers never expect ``reset()`` alone to clear everything.
        """
        if mask is None:
            return
        self.deque.reset(mask)
