TinyGPT-V/modified/utils/dummy_sentencepiece_and_tokenizers_objects.py
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends


SLOW_TO_FAST_CONVERTERS = None


def convert_slow_tokenizer(*args, **kwargs):
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])
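
For context: this module follows Hugging Face transformers' "dummy objects" mechanism. It is swapped in when the optional `sentencepiece` and `tokenizers` backends are absent, so importing the symbol always succeeds and the ImportError is deferred until the placeholder is actually called. Below is a minimal, self-contained sketch of that deferred-failure pattern, assuming a simplified availability check via `importlib.util.find_spec`; it is an illustration, not the actual `requires_backends` from `transformers.utils`, which formats per-backend installation instructions.

# Sketch of the deferred-failure pattern (simplified, hypothetical helper):
# check each backend with importlib and raise a readable ImportError only
# when the placeholder is invoked, never at import time.
import importlib.util


def requires_backends(obj, backends):
    name = getattr(obj, "__name__", obj.__class__.__name__)
    missing = [b for b in backends if importlib.util.find_spec(b) is None]
    if missing:
        raise ImportError(
            f"{name} requires the following packages, which were not found: "
            + ", ".join(missing)
        )


def convert_slow_tokenizer(*args, **kwargs):
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])


# Importing this module succeeds either way; calling the placeholder is
# what surfaces the missing-dependency error.
try:
    convert_slow_tokenizer()
except ImportError as err:
    print(err)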