# Grounded-Segment-Anything/transformers_4_35_0/utils/dummy_sentencepiece_and_tokenizers_objects.py
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends


SLOW_TO_FAST_CONVERTERS = None


def convert_slow_tokenizer(*args, **kwargs):
    requires_backends(convert_slow_tokenizer, ["sentencepiece", "tokenizers"])