GRPUI committed on
Commit
96e5f12
·
verified ·
1 Parent(s): 4c9d1a6

Upload folder using huggingface_hub

Browse files
Files changed (5) hide show
  1. config.json +12 -12
  2. model.safetensors +2 -2
  3. tokenizer.json +0 -0
  4. tokenizer_config.json +1 -1
  5. vocab.txt +0 -0
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "bert-base-multilingual-uncased",
3
  "architectures": [
4
  "BertForSequenceClassification"
5
  ],
@@ -10,20 +10,20 @@
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
- "0": "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u043d\u0435 \u0443\u0441\u0442\u0440\u043e\u0438\u043b",
14
- "1": "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u0443\u0441\u0442\u0440\u043e\u0438\u043b",
15
- "2": "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u043d\u0435 \u0443\u0441\u0442\u0440\u043e\u0438\u043b",
16
- "3": "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u0443\u0441\u0442\u0440\u043e\u0438\u043b",
17
- "4": "\u041d\u0435\u0439\u0442\u0440\u0430\u043b\u044c\u043d\u043e"
18
  },
19
  "initializer_range": 0.02,
20
  "intermediate_size": 3072,
21
  "label2id": {
22
- "\u041d\u0435\u0439\u0442\u0440\u0430\u043b\u044c\u043d\u043e": 4,
23
- "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u043d\u0435 \u0443\u0441\u0442\u0440\u043e\u0438\u043b": 0,
24
- "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u0443\u0441\u0442\u0440\u043e\u0438\u043b": 3,
25
- "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u043d\u0435 \u0443\u0441\u0442\u0440\u043e\u0438\u043b": 2,
26
- "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u0443\u0441\u0442\u0440\u043e\u0438\u043b": 1
27
  },
28
  "layer_norm_eps": 1e-12,
29
  "max_position_embeddings": 512,
@@ -42,5 +42,5 @@
42
  "transformers_version": "4.35.2",
43
  "type_vocab_size": 2,
44
  "use_cache": true,
45
- "vocab_size": 105879
46
  }
 
1
  {
2
+ "_name_or_path": "bert-base-multilingual-cased",
3
  "architectures": [
4
  "BertForSequenceClassification"
5
  ],
 
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "id2label": {
13
+ "0": "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u043d\u0435\u0443\u0441\u0442\u0440\u043e\u0438\u043b",
14
+ "1": "\u0411\u043e\u043b\u044c\u0448\u0438\u043d\u0441\u0442\u0432\u043e \u0443\u0441\u0442\u0440\u043e\u0438\u043b\u043e",
15
+ "2": "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u043d\u0435\u0443\u0441\u0442\u0440\u043e\u0438\u043b",
16
+ "3": "\u041d\u0435\u0439\u0442\u0440\u0430\u043b\u044c\u043d\u043e",
17
+ "4": "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u0443\u0441\u0442\u0440\u043e\u0438\u043b\u043e"
18
  },
19
  "initializer_range": 0.02,
20
  "intermediate_size": 3072,
21
  "label2id": {
22
+ "\u0411\u043e\u043b\u044c\u0448\u0438\u043d\u0441\u0442\u0432\u043e \u0443\u0441\u0442\u0440\u043e\u0438\u043b\u043e": 1,
23
+ "\u041d\u0435\u0439\u0442\u0440\u0430\u043b\u044c\u043d\u043e": 3,
24
+ "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u043d\u0435\u0443\u0441\u0442\u0440\u043e\u0438\u043b": 2,
25
+ "\u041f\u043e\u043b\u043d\u043e\u0441\u0442\u044c\u044e \u0443\u0441\u0442\u0440\u043e\u0438\u043b\u043e": 4,
26
+ "\u0427\u0430\u0441\u0442\u0438\u0447\u043d\u043e \u043d\u0435\u0443\u0441\u0442\u0440\u043e\u0438\u043b": 0
27
  },
28
  "layer_norm_eps": 1e-12,
29
  "max_position_embeddings": 512,
 
42
  "transformers_version": "4.35.2",
43
  "type_vocab_size": 2,
44
  "use_cache": true,
45
+ "vocab_size": 119547
46
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e04d7b3c99515e060b81a83b250006778ce98add48e4ddc9fe891cb4828a9c06
3
- size 669464588
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f2a8a2ea91255402410f19d0004395fdc883708a64e1737b440f74eb646d34e1
3
+ size 711452684
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -43,7 +43,7 @@
43
  },
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
- "do_lower_case": true,
47
  "mask_token": "[MASK]",
48
  "max_length": 512,
49
  "model_max_length": 512,
 
43
  },
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
+ "do_lower_case": false,
47
  "mask_token": "[MASK]",
48
  "max_length": 512,
49
  "model_max_length": 512,
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff