update
vocab.py (CHANGED)
@@ -386,7 +386,8 @@ assert len(set([config.name_or_path.split("/")[-1] for config in _all_tokenizer_
 class TokenizerFactory:
 
     def __init__(self):
-        self.all_tokenizer_configs = sorted(_all_tokenizer_config, key=lambda k: k.name_or_path)
+        # self.all_tokenizer_configs = sorted(_all_tokenizer_config, key=lambda k: k.name_or_path)
+        self.all_tokenizer_configs = sorted(_all_tokenizer_config, key=lambda k: k.name_display)
         self.all_tokenizer_names = [config.name_or_path for config in self.all_tokenizer_configs]
         self.name_to_config_list = [
             {config.name_or_path: config for config in self.all_tokenizer_configs},
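The change swaps the sort key in TokenizerFactory.__init__ from name_or_path (the repo id) to name_display, so the tokenizer list is ordered by the label shown in the UI rather than by owner/repo id; the name lookups below it still key on name_or_path. A minimal sketch of the effect, with an illustrative TokenizerConfig type and sample entries that are stand-ins for whatever _all_tokenizer_config actually holds in vocab.py:

# Illustrative sketch; TokenizerConfig and the sample entries are assumptions,
# not the project's actual definitions.
from dataclasses import dataclass

@dataclass
class TokenizerConfig:
    name_or_path: str  # Hub repo id, e.g. "org/model"
    name_display: str  # human-readable label shown in the UI

configs = [
    TokenizerConfig("org-b/alpha-tokenizer", "alpha-tokenizer"),
    TokenizerConfig("org-a/zeta-tokenizer", "zeta-tokenizer"),
]

# Old key: repo id, so ordering follows the owning org/user first.
by_path = [c.name_display for c in sorted(configs, key=lambda k: k.name_or_path)]
# -> ['zeta-tokenizer', 'alpha-tokenizer']

# New key: display label, so the listing is alphabetical by the name users see.
by_display = [c.name_display for c in sorted(configs, key=lambda k: k.name_display)]
# -> ['alpha-tokenizer', 'zeta-tokenizer']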