Thesis-Demo/transformers_4_35_0/utils/dummy_sentencepiece_and_tokenizers_objects.py
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends


SLOW_TO_FAST_CONVERTERS = None


def convert_slow_tokenizer(*args, **kwargs):
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])
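

# ---------------------------------------------------------------------------
# Illustrative sketch (an assumption for explanation, not part of the
# autogenerated file above): transformers ships dummy modules like this one so
# that optional names remain importable even when their backends are missing;
# calling them then raises an informative ImportError via `requires_backends`.
# The helper below is a hypothetical stand-in that shows roughly how such a
# backend check can work; it is not the library's actual implementation.
import importlib.util


def _example_requires_backends(obj, backends):
    # Collect the backends that cannot be located in the current environment.
    missing = [name for name in backends if importlib.util.find_spec(name) is None]
    if missing:
        obj_name = getattr(obj, "__name__", str(obj))
        raise ImportError(
            f"{obj_name} requires the following missing backends: {', '.join(missing)}"
        )


# Hypothetical usage, mirroring the call inside convert_slow_tokenizer above:
# _example_requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])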