lizihao041 committed
Commit 0e5952a
1 Parent(s): baf1364

Upload 8 files

added_tokens.json ADDED
@@ -0,0 +1,100 @@
+ {
+   "<DEPENDENCYTREEDEPTHRATIO_0.25>": 50266,
+   "<DEPENDENCYTREEDEPTHRATIO_0.2>": 50265,
+   "<DEPENDENCYTREEDEPTHRATIO_0.35>": 50268,
+   "<DEPENDENCYTREEDEPTHRATIO_0.3>": 50267,
+   "<DEPENDENCYTREEDEPTHRATIO_0.45>": 50270,
+   "<DEPENDENCYTREEDEPTHRATIO_0.4>": 50269,
+   "<DEPENDENCYTREEDEPTHRATIO_0.55>": 50272,
+   "<DEPENDENCYTREEDEPTHRATIO_0.5>": 50271,
+   "<DEPENDENCYTREEDEPTHRATIO_0.65>": 50274,
+   "<DEPENDENCYTREEDEPTHRATIO_0.6>": 50273,
+   "<DEPENDENCYTREEDEPTHRATIO_0.75>": 50276,
+   "<DEPENDENCYTREEDEPTHRATIO_0.7>": 50275,
+   "<DEPENDENCYTREEDEPTHRATIO_0.85>": 50278,
+   "<DEPENDENCYTREEDEPTHRATIO_0.8>": 50277,
+   "<DEPENDENCYTREEDEPTHRATIO_0.95>": 50280,
+   "<DEPENDENCYTREEDEPTHRATIO_0.9>": 50279,
+   "<DEPENDENCYTREEDEPTHRATIO_1.05>": 50282,
+   "<DEPENDENCYTREEDEPTHRATIO_1.0>": 50281,
+   "<DEPENDENCYTREEDEPTHRATIO_1.15>": 50284,
+   "<DEPENDENCYTREEDEPTHRATIO_1.1>": 50283,
+   "<DEPENDENCYTREEDEPTHRATIO_1.25>": 50286,
+   "<DEPENDENCYTREEDEPTHRATIO_1.2>": 50285,
+   "<DEPENDENCYTREEDEPTHRATIO_1.35>": 50288,
+   "<DEPENDENCYTREEDEPTHRATIO_1.3>": 50287,
+   "<DEPENDENCYTREEDEPTHRATIO_1.45>": 50290,
+   "<DEPENDENCYTREEDEPTHRATIO_1.4>": 50289,
+   "<DEPENDENCYTREEDEPTHRATIO_1.5>": 50291,
+   "<LENGTHRATIO_0.25>": 50320,
+   "<LENGTHRATIO_0.2>": 50319,
+   "<LENGTHRATIO_0.35>": 50322,
+   "<LENGTHRATIO_0.3>": 50321,
+   "<LENGTHRATIO_0.45>": 50324,
+   "<LENGTHRATIO_0.4>": 50323,
+   "<LENGTHRATIO_0.55>": 50326,
+   "<LENGTHRATIO_0.5>": 50325,
+   "<LENGTHRATIO_0.65>": 50328,
+   "<LENGTHRATIO_0.6>": 50327,
+   "<LENGTHRATIO_0.75>": 50330,
+   "<LENGTHRATIO_0.7>": 50329,
+   "<LENGTHRATIO_0.85>": 50332,
+   "<LENGTHRATIO_0.8>": 50331,
+   "<LENGTHRATIO_0.95>": 50334,
+   "<LENGTHRATIO_0.9>": 50333,
+   "<LENGTHRATIO_1.05>": 50336,
+   "<LENGTHRATIO_1.0>": 50335,
+   "<LENGTHRATIO_1.15>": 50338,
+   "<LENGTHRATIO_1.1>": 50337,
+   "<LENGTHRATIO_1.25>": 50340,
+   "<LENGTHRATIO_1.2>": 50339,
+   "<LENGTHRATIO_1.35>": 50342,
+   "<LENGTHRATIO_1.3>": 50341,
+   "<LENGTHRATIO_1.45>": 50344,
+   "<LENGTHRATIO_1.4>": 50343,
+   "<LENGTHRATIO_1.5>": 50345,
+   "<REPLACEONLYLEVENSHTEIN_0.25>": 50347,
+   "<REPLACEONLYLEVENSHTEIN_0.2>": 50346,
+   "<REPLACEONLYLEVENSHTEIN_0.35>": 50349,
+   "<REPLACEONLYLEVENSHTEIN_0.3>": 50348,
+   "<REPLACEONLYLEVENSHTEIN_0.45>": 50351,
+   "<REPLACEONLYLEVENSHTEIN_0.4>": 50350,
+   "<REPLACEONLYLEVENSHTEIN_0.55>": 50353,
+   "<REPLACEONLYLEVENSHTEIN_0.5>": 50352,
+   "<REPLACEONLYLEVENSHTEIN_0.65>": 50355,
+   "<REPLACEONLYLEVENSHTEIN_0.6>": 50354,
+   "<REPLACEONLYLEVENSHTEIN_0.75>": 50357,
+   "<REPLACEONLYLEVENSHTEIN_0.7>": 50356,
+   "<REPLACEONLYLEVENSHTEIN_0.85>": 50359,
+   "<REPLACEONLYLEVENSHTEIN_0.8>": 50358,
+   "<REPLACEONLYLEVENSHTEIN_0.95>": 50361,
+   "<REPLACEONLYLEVENSHTEIN_0.9>": 50360,
+   "<REPLACEONLYLEVENSHTEIN_1.0>": 50362,
+   "<WORDRANKRATIO_0.25>": 50293,
+   "<WORDRANKRATIO_0.2>": 50292,
+   "<WORDRANKRATIO_0.35>": 50295,
+   "<WORDRANKRATIO_0.3>": 50294,
+   "<WORDRANKRATIO_0.45>": 50297,
+   "<WORDRANKRATIO_0.4>": 50296,
+   "<WORDRANKRATIO_0.55>": 50299,
+   "<WORDRANKRATIO_0.5>": 50298,
+   "<WORDRANKRATIO_0.65>": 50301,
+   "<WORDRANKRATIO_0.6>": 50300,
+   "<WORDRANKRATIO_0.75>": 50303,
+   "<WORDRANKRATIO_0.7>": 50302,
+   "<WORDRANKRATIO_0.85>": 50305,
+   "<WORDRANKRATIO_0.8>": 50304,
+   "<WORDRANKRATIO_0.95>": 50307,
+   "<WORDRANKRATIO_0.9>": 50306,
+   "<WORDRANKRATIO_1.05>": 50309,
+   "<WORDRANKRATIO_1.0>": 50308,
+   "<WORDRANKRATIO_1.15>": 50311,
+   "<WORDRANKRATIO_1.1>": 50310,
+   "<WORDRANKRATIO_1.25>": 50313,
+   "<WORDRANKRATIO_1.2>": 50312,
+   "<WORDRANKRATIO_1.35>": 50315,
+   "<WORDRANKRATIO_1.3>": 50314,
+   "<WORDRANKRATIO_1.45>": 50317,
+   "<WORDRANKRATIO_1.4>": 50316,
+   "<WORDRANKRATIO_1.5>": 50318
+ }
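
The 98 tokens above resemble the discrete control tokens used by controllable text-simplification systems in the ACCESS/MUSS family: each token encodes a target ratio (dependency-tree depth, length, replace-only Levenshtein similarity, word rank) bucketed in steps of 0.05, and they occupy IDs 50265-50362, directly after BART's base vocabulary of 50265 entries. A minimal sketch (not part of the commit) for checking the extended vocabulary; "path/to/this/repo" is a placeholder for a local clone of this repository:

# A sketch, assuming a local clone at "path/to/this/repo".
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")

# Added tokens follow the base vocabulary, per added_tokens.json above.
assert tok.convert_tokens_to_ids("<LENGTHRATIO_0.8>") == 50331
assert tok.convert_tokens_to_ids("<REPLACEONLYLEVENSHTEIN_1.0>") == 50362
print(len(tok))  # 50265 base tokens + 98 added = 50363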
config.json ADDED
@@ -0,0 +1,75 @@
+ {
+   "_name_or_path": "facebook/bart-base",
+   "activation_dropout": 0.1,
+   "activation_function": "gelu",
+   "add_bias_logits": false,
+   "add_final_layer_norm": false,
+   "architectures": [
+     "BartForConditionalGeneration"
+   ],
+   "attention_dropout": 0.1,
+   "bos_token_id": 0,
+   "classif_dropout": 0.1,
+   "classifier_dropout": 0.0,
+   "d_model": 768,
+   "decoder_attention_heads": 12,
+   "decoder_ffn_dim": 3072,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 6,
+   "decoder_start_token_id": 2,
+   "dropout": 0.1,
+   "early_stopping": true,
+   "encoder_attention_heads": 12,
+   "encoder_ffn_dim": 3072,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 6,
+   "eos_token_id": 2,
+   "forced_bos_token_id": 0,
+   "forced_eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "max_position_embeddings": 1024,
+   "model_type": "bart",
+   "no_repeat_ngram_size": 3,
+   "normalize_before": false,
+   "normalize_embedding": true,
+   "num_beams": 4,
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "scale_embedding": false,
+   "task_specific_params": {
+     "summarization": {
+       "length_penalty": 1.0,
+       "max_length": 128,
+       "min_length": 12,
+       "num_beams": 4
+     },
+     "summarization_cnn": {
+       "length_penalty": 2.0,
+       "max_length": 142,
+       "min_length": 56,
+       "num_beams": 4
+     },
+     "summarization_xsum": {
+       "length_penalty": 1.0,
+       "max_length": 62,
+       "min_length": 11,
+       "num_beams": 6
+     }
+   },
+   "torch_dtype": "float32",
+   "transformers_version": "4.20.1",
+   "use_cache": true,
+   "vocab_size": 50363
+ }
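
Note that "vocab_size" is 50363 rather than bart-base's 50265: the checkpoint's embedding matrix was resized to cover the 98 control tokens added above (50265 + 98 = 50363). A minimal loading sketch (same placeholder path as before):

# A sketch, assuming a local clone at "path/to/this/repo".
from transformers import BartForConditionalGeneration

model = BartForConditionalGeneration.from_pretrained("path/to/this/repo")

# 50363 = 50265 (facebook/bart-base) + 98 added control tokens
print(model.config.vocab_size)                       # 50363
print(model.get_input_embeddings().weight.shape[0])  # 50363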
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:854e48e4ad357f8bee9111b8be084d22e8562abd417937981ef8598a626c4a0e
+ size 558270585
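
The three lines above are a Git LFS pointer, not the weights themselves: "oid" is the SHA-256 of the real file and "size" is its length in bytes (about 558 MB). Cloning with git-lfs installed, or downloading through the Hub, replaces this pointer with the actual pytorch_model.bin.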
special_tokens_map.json ADDED
@@ -0,0 +1,15 @@
+ {
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "mask_token": {
+     "content": "<mask>",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "add_prefix_space": false,
+   "bos_token": "<s>",
+   "cls_token": "<s>",
+   "eos_token": "</s>",
+   "errors": "replace",
+   "mask_token": "<mask>",
+   "model_max_length": 1024,
+   "name_or_path": "facebook/bart-base",
+   "pad_token": "<pad>",
+   "sep_token": "</s>",
+   "special_tokens_map_file": null,
+   "tokenizer_class": "BartTokenizer",
+   "trim_offsets": true,
+   "unk_token": "<unk>"
+ }
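
If this checkpoint follows the usual control-token recipe (one token per attribute prepended to the source sentence before encoding, as in ACCESS/MUSS), inference would look roughly like the sketch below. The token order and ratio values are illustrative assumptions; the commit itself does not document the training format.

# A hedged end-to-end sketch; control-token order and values are assumptions.
from transformers import AutoTokenizer, BartForConditionalGeneration

path = "path/to/this/repo"  # placeholder for a local clone
tok = AutoTokenizer.from_pretrained(path)
model = BartForConditionalGeneration.from_pretrained(path)

src = ("<DEPENDENCYTREEDEPTHRATIO_0.8> <WORDRANKRATIO_0.75> "
       "<REPLACEONLYLEVENSHTEIN_0.65> <LENGTHRATIO_0.75> "
       "The incumbent relinquished his post amid allegations of impropriety.")
inputs = tok(src, return_tensors="pt")
out = model.generate(**inputs, num_beams=4, max_length=128)
print(tok.decode(out[0], skip_special_tokens=True))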
vocab.json ADDED
The diff for this file is too large to render. See raw diff