cointegrated committed on
Commit
5b689cb
1 Parent(s): d22f48f

Start distilling LABSE

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. pytorch_model.bin +2 -2
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "cointegrated/tinybert-ru",
3
  "architectures": [
4
  "BertForPreTraining"
5
  ],
1
  {
2
+ "_name_or_path": "cointegrated/rubert-tiny",
3
  "architectures": [
4
  "BertForPreTraining"
5
  ],
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:217191c18de6b4eed08d62b95c68645d640f840d40bdc090624e4f4307a83c70
3
- size 47679078
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d57562a4879aa85d582600c590a052f513c26077282a39bb8dc862e401dba4b1
3
+ size 47679974
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cointegrated/tinybert-ru", "do_basic_tokenize": true, "never_split": null}
1
+ {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "cointegrated/rubert-tiny", "do_basic_tokenize": true, "never_split": null}