from typing import Any, Dict, List, Tuple
import torch

class WordVecEncoderBase(torch.nn.Module):
    """Base class for word-vector encoders producing BERT-style batch tensors.

    Subclasses must implement ``forward``, ``collate_batch_tensor``,
    ``sentence_to_tensor`` and ``empty_chars_tensor``. The concrete
    ``batch_str_to_input_tensor_bert_style_cached`` builds a padded batch
    tensor on top of those primitives, caching the per-sentence prefix.

    NOTE(review): the original file defined
    ``batch_str_to_input_tensor_bert_style_cached`` twice — an abstract stub
    followed by the real implementation, which silently shadowed the stub.
    The dead stub has been removed.
    """

    def __init__(self, name: str, embedding_dim: int) -> None:
        super().__init__()
        self.name = name
        self.embedding_dim = embedding_dim
        # Maps sentence -> cached tensor of [CLS] + sentence content.
        # The [SEP]/[PAD] tail depends on the batch length, so it is NOT cached.
        self.sentence_stem_tensor_cache: Dict[str, torch.Tensor] = dict()

    def forward(self, input_tensor: torch.Tensor, batch_size: int):
        raise NotImplementedError('必须实现该方法')

    def collate_batch_tensor(self, batch_sentences: List[str]):
        raise NotImplementedError('必须实现该方法')

    def sentence_to_tensor(self, sentence: str) -> torch.Tensor:
        # Must return one embedding row per character of `sentence`
        # (the padding arithmetic below assumes this) — confirm in subclasses.
        raise NotImplementedError('必须实现该方法')

    def empty_chars_tensor(self, num_chars: int) -> torch.Tensor:
        # Must return `num_chars` rows of the special/padding embedding.
        raise NotImplementedError('必须实现该方法')

    def batch_str_to_input_tensor_bert_style_cached(self, batch_sentences: List[str]) -> torch.Tensor:
        """Encode a batch of sentences into one padded tensor, BERT style.

        Each sentence becomes ``[CLS] + chars + [SEP] + [PAD]...``, padded to
        the longest sentence in the batch plus two special positions. The
        ``[CLS] + content`` prefix is cached per sentence; the ``[SEP]/[PAD]``
        tail is batch-dependent and computed on the fly.

        :param batch_sentences: non-empty list of sentences to encode
                                (an empty list raises ValueError via ``max``).
        :return: tensor of shape (len(batch_sentences), max_len + 2, dim).
        """
        lens = [len(s) for s in batch_sentences]
        batch_len = max(lens) + 2  # longest sentence + [CLS] + [SEP]
        batch_tensor = []
        for sentence in batch_sentences:
            # [CLS] + sentence content: build once, then reuse from the cache.
            if sentence not in self.sentence_stem_tensor_cache:
                # Mirror BERT's encoding: prepend [CLS]; [SEP]/padding follow later.
                stem_parts = [
                    self.empty_chars_tensor(1),         # [CLS]
                    self.sentence_to_tensor(sentence),  # sentence content
                ]
                self.sentence_stem_tensor_cache[sentence] = torch.cat(stem_parts)
            sentence_stem_tensor = self.sentence_stem_tensor_cache[sentence]
            # [SEP] + [PAD]s: length depends on batch_len, so computed per batch.
            sep_and_pad_tensor = self.empty_chars_tensor(batch_len - len(sentence) - 1)
            batch_tensor.append(torch.cat((sentence_stem_tensor, sep_and_pad_tensor)))
        return torch.stack(batch_tensor)