Joshua Lochner committed
Commit 6648694
Parent: b351b75

Remove sequence classification for bert-base-multilingual models

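The files removed below are the quantized ONNX sequence-classification exports for bert-base-multilingual-cased and bert-base-multilingual-uncased. The commit message does not state a reason, but the deleted configs list only "BertForMaskedLM" under "architectures", which suggests the base checkpoints carry no trained classification head, so a sequence-classification export would ship randomly initialized classifier weights. A minimal sketch (not part of this commit) of what loading such a head from the base checkpoint looks like in transformers:

```python
# Hedged sketch, not part of this commit: loading the base multilingual
# checkpoint for sequence classification attaches a classification head
# whose weights are newly initialized, since the upstream config lists
# only "BertForMaskedLM" under "architectures".
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "bert-base-multilingual-cased"
)
# transformers warns that the classifier weights were not initialized from
# the checkpoint and should be fine-tuned before use on a downstream task.
```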
quantized/bert-base-multilingual-cased/sequence-classification/config.json DELETED
@@ -1,30 +0,0 @@
- {
- "_name_or_path": "bert-base-multilingual-cased",
- "architectures": [
- "BertForMaskedLM"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "directionality": "bidi",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "initializer_range": 0.02,
- "intermediate_size": 3072,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "pooler_fc_size": 768,
- "pooler_num_attention_heads": 12,
- "pooler_num_fc_layers": 3,
- "pooler_size_per_head": 128,
- "pooler_type": "first_token_transform",
- "position_embedding_type": "absolute",
- "transformers_version": "4.26.1",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 119547
- }
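For reference, the deleted config mirrors the upstream bert-base-multilingual-cased configuration, so it can be regenerated from the Hub if ever needed. A minimal sketch, assuming the transformers library is installed:

```python
# Minimal sketch, not part of this commit: regenerate the deleted config
# from the upstream checkpoint on the Hugging Face Hub.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("bert-base-multilingual-cased")
print(config.architectures)  # ['BertForMaskedLM'], as in the deleted file
print(config.vocab_size)     # 119547
config.save_pretrained("./bert-base-multilingual-cased-config")
```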
quantized/bert-base-multilingual-cased/sequence-classification/model.onnx DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:30b54e696722bc9cd526016cd7d215dfed320e15f63fa14aba2013e1b6c04b02
- size 711528862
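The deleted model.onnx entry is a Git LFS pointer (spec version, SHA-256 oid, and byte size) rather than the weights themselves. A minimal sketch, assuming a locally downloaded copy of the file, for checking it against the recorded oid:

```python
# Minimal sketch, not part of this commit: verify a local copy of the
# removed ONNX file against the SHA-256 oid recorded in the LFS pointer.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "30b54e696722bc9cd526016cd7d215dfed320e15f63fa14aba2013e1b6c04b02"
print(sha256_of("model.onnx") == expected)  # True if the copy is intact
```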
quantized/bert-base-multilingual-cased/sequence-classification/special_tokens_map.json DELETED
@@ -1,7 +0,0 @@
- {
- "cls_token": "[CLS]",
- "mask_token": "[MASK]",
- "pad_token": "[PAD]",
- "sep_token": "[SEP]",
- "unk_token": "[UNK]"
- }
quantized/bert-base-multilingual-cased/sequence-classification/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
quantized/bert-base-multilingual-cased/sequence-classification/tokenizer_config.json DELETED
@@ -1,14 +0,0 @@
- {
- "cls_token": "[CLS]",
- "do_lower_case": false,
- "mask_token": "[MASK]",
- "model_max_length": 512,
- "name_or_path": "bert-base-multilingual-cased",
- "pad_token": "[PAD]",
- "sep_token": "[SEP]",
- "special_tokens_map_file": null,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "unk_token": "[UNK]"
- }
quantized/bert-base-multilingual-cased/sequence-classification/vocab.txt DELETED
The diff for this file is too large to render. See raw diff
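The deleted tokenizer files (special_tokens_map.json, tokenizer.json, tokenizer_config.json, vocab.txt) appear to be derived from the upstream bert-base-multilingual-cased tokenizer, judging by the "name_or_path" in the deleted tokenizer_config.json, so equivalent tokenization remains available from the original checkpoint. A minimal sketch:

```python
# Minimal sketch, not part of this commit: load the same tokenizer from
# the upstream checkpoint instead of the files deleted here.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
print(tokenizer.tokenize("Multilingual BERT still tokenizes this text."))
```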
quantized/bert-base-multilingual-uncased/sequence-classification/config.json DELETED
@@ -1,30 +0,0 @@
- {
- "_name_or_path": "bert-base-multilingual-uncased",
- "architectures": [
- "BertForMaskedLM"
- ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
- "directionality": "bidi",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 768,
- "initializer_range": 0.02,
- "intermediate_size": 3072,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
- "pad_token_id": 0,
- "pooler_fc_size": 768,
- "pooler_num_attention_heads": 12,
- "pooler_num_fc_layers": 3,
- "pooler_size_per_head": 128,
- "pooler_type": "first_token_transform",
- "position_embedding_type": "absolute",
- "transformers_version": "4.26.1",
- "type_vocab_size": 2,
- "use_cache": true,
- "vocab_size": 105879
- }
quantized/bert-base-multilingual-uncased/sequence-classification/model.onnx DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:bee715fe94ecc95e65161aaf938af33e8a4323f8f50d66465f5b06f96d286c7e
- size 170127027
quantized/bert-base-multilingual-uncased/sequence-classification/special_tokens_map.json DELETED
@@ -1,7 +0,0 @@
- {
- "cls_token": "[CLS]",
- "mask_token": "[MASK]",
- "pad_token": "[PAD]",
- "sep_token": "[SEP]",
- "unk_token": "[UNK]"
- }
quantized/bert-base-multilingual-uncased/sequence-classification/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
quantized/bert-base-multilingual-uncased/sequence-classification/tokenizer_config.json DELETED
@@ -1,14 +0,0 @@
- {
- "cls_token": "[CLS]",
- "do_lower_case": true,
- "mask_token": "[MASK]",
- "model_max_length": 512,
- "name_or_path": "bert-base-multilingual-uncased",
- "pad_token": "[PAD]",
- "sep_token": "[SEP]",
- "special_tokens_map_file": null,
- "strip_accents": null,
- "tokenize_chinese_chars": true,
- "tokenizer_class": "BertTokenizer",
- "unk_token": "[UNK]"
- }
quantized/bert-base-multilingual-uncased/sequence-classification/vocab.txt DELETED
The diff for this file is too large to render. See raw diff