Ateeb committed on
Commit bd31917
1 Parent(s): 7bc0007

commit from $USER

config.json ADDED
@@ -0,0 +1,96 @@
+ {
+   "activation_dropout": 0.0,
+   "architectures": [
+     "FunnelForSequenceClassification"
+   ],
+   "attention_dropout": 0.1,
+   "attention_type": "relative_shift",
+   "block_repeats": [
+     1,
+     1,
+     1
+   ],
+   "block_sizes": [
+     4,
+     4,
+     4
+   ],
+   "d_head": 64,
+   "d_inner": 3072,
+   "d_model": 768,
+   "hidden_act": "gelu_new",
+   "hidden_dropout": 0.1,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2",
+     "3": "LABEL_3",
+     "4": "LABEL_4",
+     "5": "LABEL_5",
+     "6": "LABEL_6",
+     "7": "LABEL_7",
+     "8": "LABEL_8",
+     "9": "LABEL_9",
+     "10": "LABEL_10",
+     "11": "LABEL_11",
+     "12": "LABEL_12",
+     "13": "LABEL_13",
+     "14": "LABEL_14",
+     "15": "LABEL_15",
+     "16": "LABEL_16",
+     "17": "LABEL_17",
+     "18": "LABEL_18",
+     "19": "LABEL_19",
+     "20": "LABEL_20",
+     "21": "LABEL_21",
+     "22": "LABEL_22",
+     "23": "LABEL_23",
+     "24": "LABEL_24",
+     "25": "LABEL_25",
+     "26": "LABEL_26"
+   },
+   "initializer_range": 0.1,
+   "initializer_std": null,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_10": 10,
+     "LABEL_11": 11,
+     "LABEL_12": 12,
+     "LABEL_13": 13,
+     "LABEL_14": 14,
+     "LABEL_15": 15,
+     "LABEL_16": 16,
+     "LABEL_17": 17,
+     "LABEL_18": 18,
+     "LABEL_19": 19,
+     "LABEL_2": 2,
+     "LABEL_20": 20,
+     "LABEL_21": 21,
+     "LABEL_22": 22,
+     "LABEL_23": 23,
+     "LABEL_24": 24,
+     "LABEL_25": 25,
+     "LABEL_26": 26,
+     "LABEL_3": 3,
+     "LABEL_4": 4,
+     "LABEL_5": 5,
+     "LABEL_6": 6,
+     "LABEL_7": 7,
+     "LABEL_8": 8,
+     "LABEL_9": 9
+   },
+   "layer_norm_eps": 1e-09,
+   "max_position_embeddings": 512,
+   "model_type": "funnel",
+   "n_head": 12,
+   "num_decoder_layers": 2,
+   "pool_q_only": true,
+   "pooling_type": "mean",
+   "rel_attn_type": "factorized",
+   "separate_cls": true,
+   "transformers_version": "4.3.2",
+   "truncate_seq": true,
+   "type_vocab_size": 3,
+   "vocab_size": 30522
+ }
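
The configuration above describes a Funnel Transformer (three blocks of four layers, d_model 768, 12 heads, matching funnel-transformer/small) with a 27-label sequence-classification head. A minimal loading sketch with the transformers library, assuming the repository has been cloned into the current directory (the local paths are an assumption, not part of this commit):

from transformers import FunnelConfig, FunnelForSequenceClassification

# Read the configuration file added in this commit (local clone path assumed).
config = FunnelConfig.from_pretrained("config.json")
print(config.model_type, config.num_labels)  # funnel 27

# Instantiate the 27-label classifier; pointing from_pretrained at the cloned
# repository directory also loads the pytorch_model.bin weights listed below.
model = FunnelForSequenceClassification.from_pretrained(".", config=config)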
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ff80e092f094bac8d48542ccb09e8c5a32057520f432eedabd4852eba7249ec
+ size 464993246
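
pytorch_model.bin is stored with Git LFS, so the commit records only a pointer file: the spec version, the object's sha256 oid, and its size (about 465 MB). A small sketch, assuming the actual weights have already been fetched (e.g. via git lfs pull), that checks a local copy against the pointer:

import hashlib
import os

# Expected values copied from the Git LFS pointer above.
EXPECTED_OID = "6ff80e092f094bac8d48542ccb09e8c5a32057520f432eedabd4852eba7249ec"
EXPECTED_SIZE = 464993246

def verify_lfs_object(path: str) -> bool:
    """Return True if the file matches the LFS pointer's size and sha256 oid."""
    if os.path.getsize(path) != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

print(verify_lfs_object("pytorch_model.bin"))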
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "<sep>", "pad_token": "<pad>", "cls_token": "<cls>", "mask_token": "<mask>"}
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "<unk>", "sep_token": "<sep>", "pad_token": "<pad>", "cls_token": "<cls>", "mask_token": "<mask>", "tokenize_chinese_chars": true, "strip_accents": null, "bos_token": "<s>", "eos_token": "</s>", "clean_text": true, "wordpieces_prefix": "##", "model_max_length": 512, "name_or_path": "funnel-transformer/small"}
vocab.txt ADDED
The diff for this file is too large to render. See raw diff
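
Together, special_tokens_map.json, tokenizer_config.json, and vocab.txt define a lowercasing WordPiece tokenizer derived from funnel-transformer/small, with <cls>/<sep>-style special tokens and a 512-token limit. A minimal usage sketch, assuming the repository (including vocab.txt) is cloned locally; the input sentence is only an illustration:

from transformers import FunnelTokenizer

# Loads vocab.txt together with tokenizer_config.json and
# special_tokens_map.json from this commit (local clone path assumed).
tokenizer = FunnelTokenizer.from_pretrained(".")

enc = tokenizer("An example sentence for the classifier.",
                truncation=True, max_length=512, return_tensors="pt")
print(tokenizer.cls_token, tokenizer.sep_token)  # <cls> <sep>
print(enc["input_ids"].shape)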