Nehc committed on
Commit
feef8ee
1 Parent(s): d3e025c

new version

Browse files
Files changed (5) hide show
  1. config.json +5 -4
  2. pytorch_model.bin +2 -2
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +1 -1
  5. vocab.txt +0 -0
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "DeepPavlov/rubert-base-cased",
3
  "architectures": [
4
  "BertForSequenceClassification"
5
  ],
@@ -229,7 +229,8 @@
229
  "216": "LABEL_216",
230
  "217": "LABEL_217",
231
  "218": "LABEL_218",
232
- "219": "LABEL_219"
 
233
  },
234
  "initializer_range": 0.02,
235
  "intermediate_size": 3072,
@@ -370,6 +371,7 @@
370
  "LABEL_218": 218,
371
  "LABEL_219": 219,
372
  "LABEL_22": 22,
 
373
  "LABEL_23": 23,
374
  "LABEL_24": 24,
375
  "LABEL_25": 25,
@@ -460,7 +462,6 @@
460
  "model_type": "bert",
461
  "num_attention_heads": 12,
462
  "num_hidden_layers": 12,
463
- "output_past": true,
464
  "pad_token_id": 0,
465
  "pooler_fc_size": 768,
466
  "pooler_num_attention_heads": 12,
@@ -473,5 +474,5 @@
473
  "transformers_version": "4.16.2",
474
  "type_vocab_size": 2,
475
  "use_cache": true,
476
- "vocab_size": 119547
477
  }
 
1
  {
2
+ "_name_or_path": "test_trainer_f/checkpoint-340",
3
  "architectures": [
4
  "BertForSequenceClassification"
5
  ],
 
229
  "216": "LABEL_216",
230
  "217": "LABEL_217",
231
  "218": "LABEL_218",
232
+ "219": "LABEL_219",
233
+ "220": "LABEL_220"
234
  },
235
  "initializer_range": 0.02,
236
  "intermediate_size": 3072,
 
371
  "LABEL_218": 218,
372
  "LABEL_219": 219,
373
  "LABEL_22": 22,
374
+ "LABEL_220": 220,
375
  "LABEL_23": 23,
376
  "LABEL_24": 24,
377
  "LABEL_25": 25,
 
462
  "model_type": "bert",
463
  "num_attention_heads": 12,
464
  "num_hidden_layers": 12,
 
465
  "pad_token_id": 0,
466
  "pooler_fc_size": 768,
467
  "pooler_num_attention_heads": 12,
 
474
  "transformers_version": "4.16.2",
475
  "type_vocab_size": 2,
476
  "use_cache": true,
477
+ "vocab_size": 105879
478
  }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3823020c69d7150f4672f0b6d3fdb726c8db25b74ce6b31ff68191bf92c1dd46
3
- size 712174573
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1812fbc08868523091fddedee792d9dc85efa69e359efc22315c05c570622aa5
3
+ size 670186733
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1 +1 @@
1
- {"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "special_tokens_map_file": "E:\\HF_HOME\\transformers\\853440ef3f696efb168918bd6c8489323dfaad3a7b308974fa2669336a00d203.dd8bd9bfd3664b530ea4e645105f557769387b3da9f79bdb55ed556bdd80611d", "name_or_path": "DeepPavlov/rubert-base-cased", "do_basic_tokenize": true, "never_split": null, "tokenizer_class": "BertTokenizer"}
 
1
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "special_tokens_map_file": null, "name_or_path": "bert-base-multilingual-uncased", "tokenizer_class": "BertTokenizer"}
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff