File size: 286 Bytes
1ce5e18
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
# This file is autogenerated by the command `make fix-copies`, do not edit.
from ..utils import DummyObject, requires_backends


# Placeholder for the real slow→fast converter mapping. In this dummy module it
# is `None` because the backends it needs ("sentencepiece"/"tokenizers", per the
# `requires_backends` call below) are not installed; presumably the genuine
# mapping lives in the real `convert_slow_tokenizer` module — TODO confirm.
SLOW_TO_FAST_CONVERTERS = None


def convert_slow_tokenizer(*args, **kwargs):
    """Dummy stand-in for the real ``convert_slow_tokenizer``.

    Accepts any arguments and immediately delegates to ``requires_backends``,
    which (presumably — defined in ``..utils``, not visible here) raises an
    informative error telling the user to install the missing backends.
    """
    # NOTE(review): keep the backend list identical to the autogenerated one —
    # `make fix-copies` regenerates this file and the names are user-facing.
    needed_backends = ["sentencepiece", "tokenizers"]
    requires_backends(convert_slow_tokenizer, needed_backends)