asahi417 committed
Commit 4c0a160 (1 parent: 25c9daf)

model update

added_tokens.json ADDED
@@ -0,0 +1 @@
+ {"<hl>": 50265}
config.json ADDED
@@ -0,0 +1,73 @@
+ {
+   "_name_or_path": "lmqg_output/optimized_no_passage/bart_base_squad/model_ohllrv/epoch_2",
+   "activation_dropout": 0.1,
+   "activation_function": "gelu",
+   "add_bias_logits": false,
+   "add_final_layer_norm": false,
+   "architectures": [
+     "BartForConditionalGeneration"
+   ],
+   "attention_dropout": 0.1,
+   "bos_token_id": 0,
+   "classif_dropout": 0.1,
+   "classifier_dropout": 0.0,
+   "d_model": 768,
+   "decoder_attention_heads": 12,
+   "decoder_ffn_dim": 3072,
+   "decoder_layerdrop": 0.0,
+   "decoder_layers": 6,
+   "decoder_start_token_id": 2,
+   "dropout": 0.1,
+   "early_stopping": true,
+   "encoder_attention_heads": 12,
+   "encoder_ffn_dim": 3072,
+   "encoder_layerdrop": 0.0,
+   "encoder_layers": 6,
+   "eos_token_id": 2,
+   "forced_eos_token_id": 2,
+   "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0",
+     "1": "LABEL_1",
+     "2": "LABEL_2"
+   },
+   "init_std": 0.02,
+   "is_encoder_decoder": true,
+   "label2id": {
+     "LABEL_0": 0,
+     "LABEL_1": 1,
+     "LABEL_2": 2
+   },
+   "max_position_embeddings": 1024,
+   "model_type": "bart",
+   "no_repeat_ngram_size": 3,
+   "normalize_before": false,
+   "normalize_embedding": true,
+   "num_beams": 4,
+   "num_hidden_layers": 6,
+   "pad_token_id": 1,
+   "scale_embedding": false,
+   "task_specific_params": {
+     "summarization": {
+       "length_penalty": 1.0,
+       "max_length": 128,
+       "min_length": 12,
+       "num_beams": 4
+     },
+     "summarization_cnn": {
+       "length_penalty": 2.0,
+       "max_length": 142,
+       "min_length": 56,
+       "num_beams": 4
+     },
+     "summarization_xsum": {
+       "length_penalty": 1.0,
+       "max_length": 62,
+       "min_length": 11,
+       "num_beams": 6
+     }
+   },
+   "transformers_version": "4.8.2",
+   "use_cache": true,
+   "vocab_size": 50266
+ }
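
The config describes a BART-base-sized encoder-decoder (6 encoder and 6 decoder layers, d_model 768, FFN 3072, 12 heads), a vocabulary grown to 50,266 to cover the added <hl> token, and generation defaults of num_beams=4, early_stopping=true, no_repeat_ngram_size=3. A minimal sketch of loading the checkpoint; "./bart_base_squad" is an assumed local clone path, not a name taken from this commit:

# Sketch: load config and model from a local clone of this repository (path is illustrative).
from transformers import BartConfig, BartForConditionalGeneration

config = BartConfig.from_pretrained("./bart_base_squad")
model = BartForConditionalGeneration.from_pretrained("./bart_base_squad")
print(config.d_model, config.encoder_layers, config.vocab_size)    # 768 6 50266
print(config.num_beams, config.no_repeat_ngram_size)               # 4 3
print(model.get_input_embeddings().num_embeddings)                 # 50266, matches vocab_size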
eval/metric.first.answer.json ADDED
@@ -0,0 +1 @@
+ {"dev": {"Bleu_1": 0.5174429781484685, "Bleu_2": 0.3669630492924728, "Bleu_3": 0.2821937434749587, "Bleu_4": 0.22417497287133004, "METEOR": 0.24801161892587523, "ROUGE_L": 0.5026662406435107}, "test": {"Bleu_1": 0.48315891994758353, "Bleu_2": 0.3325079967523817, "Bleu_3": 0.24870685676854268, "Bleu_4": 0.19174361769514225, "METEOR": 0.23137104841320796, "ROUGE_L": 0.47651920166883005}}
eval/metric.first.sentence.json ADDED
@@ -0,0 +1 @@
+ {"dev": {"Bleu_1": 0.5669235801226897, "Bleu_2": 0.41130225651020574, "Bleu_3": 0.32148245069238607, "Bleu_4": 0.258882449288271, "METEOR": 0.2638644241160143, "ROUGE_L": 0.5283539098256222}, "test": {"Bleu_1": 0.5558530064840774, "Bleu_2": 0.3958292171488584, "Bleu_3": 0.30182291836615555, "Bleu_4": 0.2361002968730591, "METEOR": 0.2518149650657295, "ROUGE_L": 0.5126623311228001}}
eval/samples.dev.hyp.txt ADDED
The diff for this file is too large to render. See raw diff
 
eval/samples.test.hyp.txt ADDED
The diff for this file is too large to render. See raw diff
 
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed732cfaf7685d5f8201b509450d56395e92cb734d5f3833ca7b3d5733334993
+ size 557982265
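
pytorch_model.bin is stored through Git LFS, so the diff only shows the pointer file: the spec version, the SHA-256 of the real payload, and its size (557,982,265 bytes, roughly 532 MiB). A sketch for checking an already-downloaded copy against that pointer:

# Sketch: verify a downloaded pytorch_model.bin against the oid/size recorded in the LFS pointer.
import hashlib, os

expected_oid = "ed732cfaf7685d5f8201b509450d56395e92cb734d5f3833ca7b3d5733334993"
expected_size = 557982265

digest = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print(os.path.getsize("pytorch_model.bin") == expected_size)  # True for an intact download
print(digest.hexdigest() == expected_oid)                      # True for an intact download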
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"bos_token": "<s>", "eos_token": "</s>", "unk_token": "<unk>", "sep_token": "</s>", "pad_token": "<pad>", "cls_token": "<s>", "mask_token": {"content": "<mask>", "single_word": false, "lstrip": true, "rstrip": false, "normalized": false}, "additional_special_tokens": ["<hl>"]}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "<unk>", "bos_token": "<s>", "eos_token": "</s>", "add_prefix_space": false, "errors": "replace", "sep_token": "</s>", "cls_token": "<s>", "pad_token": "<pad>", "mask_token": "<mask>", "model_max_length": 1024, "name_or_path": "lmqg_output/optimized_no_passage/bart_base_squad/model_ohllrv/epoch_2", "special_tokens_map_file": "lmqg_output/optimized_no_passage/bart_base_squad/model_ohllrv/epoch_2/special_tokens_map.json", "tokenizer_class": "BartTokenizer"}
vocab.json ADDED
The diff for this file is too large to render. See raw diff