from typing import Optional, Union
from transformers import (AutoTokenizer, PreTrainedTokenizer,
                          PreTrainedTokenizerFast)


# Load the tokenizer from a local snapshot of Meta-Llama-3.1-8B-Instruct.
# NOTE(review): the absolute path is machine-specific — consider moving it
# to a CLI argument or config before sharing this script.
tokenizer = AutoTokenizer.from_pretrained(
    "/home/yangxianpku/models/LLM-Research/Meta-Llama-3.1-8B-Instruct",
    trust_remote_code=True,
)

# Debug aid: shows the tokenizer repr (class, vocab size, special tokens, ...).
print(tokenizer)

# Either the slow (pure-Python) or the fast (Rust-backed) tokenizer class.
AnyTokenizer = Union[PreTrainedTokenizer, PreTrainedTokenizerFast]


def get_cached_tokenizer(tokenizer: "AnyTokenizer") -> "AnyTokenizer":
    """Get tokenizer with cached properties.

    This will patch the tokenizer object in place.

    By default, transformers will recompute multiple tokenizer properties
    each time they are called, leading to a significant slowdown. This
    function caches these properties for faster access.

    Args:
        tokenizer: The tokenizer to patch. It is modified in place and
            also returned for convenience.

    Returns:
        The same tokenizer instance, with its class swapped to a dynamic
        subclass whose special-token properties and ``__len__`` return
        precomputed constants.
    """
    # Snapshot the expensive-to-recompute properties once, up front.
    # e.g. for Llama-3.1: ids [128000, 128009] for
    # ['<|begin_of_text|>', '<|eot_id|>'].
    tokenizer_all_special_ids = set(tokenizer.all_special_ids)
    # The "extended" list keeps the AddedToken objects (which carry
    # lstrip/rstrip/normalized flags), not just the plain strings.
    tokenizer_all_special_tokens_extended = (
        tokenizer.all_special_tokens_extended)
    tokenizer_all_special_tokens = set(tokenizer.all_special_tokens)
    tokenizer_len = len(tokenizer)
    # get_vocab() returns a {token: token_id} dict; cache the largest id.
    max_token_id = max(tokenizer.get_vocab().values())

    class CachedTokenizer(tokenizer.__class__):  # type: ignore

        @property
        def all_special_ids(self):
            return tokenizer_all_special_ids

        @property
        def all_special_tokens(self):
            return tokenizer_all_special_tokens

        @property
        def all_special_tokens_extended(self):
            return tokenizer_all_special_tokens_extended

        @property
        def max_token_id(self):
            return max_token_id

        def __len__(self):
            return tokenizer_len

    CachedTokenizer.__name__ = f"Cached{tokenizer.__class__.__name__}"

    # Swapping __class__ patches the existing instance in place, so every
    # reference to this tokenizer elsewhere also sees the cached values.
    tokenizer.__class__ = CachedTokenizer
    return tokenizer


tokenizer = get_cached_tokenizer(tokenizer)