# Grounded-Segment-Anything/transformers_4_35_0/utils/dummy_sentencepiece_and_tokenizers_objects.py
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends
# Placeholder for the slow->fast tokenizer converter registry; the real
# mapping is only defined when the `sentencepiece` and `tokenizers`
# backends are installed (this module is the dummy fallback).
SLOW_TO_FAST_CONVERTERS = None
def convert_slow_tokenizer(*args, **kwargs):
    """Dummy stand-in for the real ``convert_slow_tokenizer``.

    Accepts any arguments and delegates to ``requires_backends``, which
    signals that the ``sentencepiece`` and ``tokenizers`` backends must
    be installed for this function to be usable.
    """
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])