TANGZHONGYI committed
Commit 90d72d1
1 Parent(s): 3d9a538

Delete best

best/config.json DELETED
@@ -1,75 +0,0 @@
- {
-   "_name_or_path": "/home/tangzhongyi/tablesumm2insighthub/LM_Unlearning/BART-large-Chinese",
-   "activation_dropout": 0.1,
-   "activation_function": "gelu",
-   "add_bias_logits": false,
-   "add_final_layer_norm": false,
-   "architectures": [
-     "BartForConditionalGeneration"
-   ],
-   "attention_dropout": 0.1,
-   "bos_token_id": 101,
-   "classif_dropout": 0.1,
-   "classifier_dropout": 0.0,
-   "d_model": 1024,
-   "decoder_attention_heads": 16,
-   "decoder_ffn_dim": 4096,
-   "decoder_layerdrop": 0.0,
-   "decoder_layers": 12,
-   "decoder_start_token_id": 102,
-   "dropout": 0.1,
-   "early_stopping": true,
-   "encoder_attention_heads": 16,
-   "encoder_ffn_dim": 4096,
-   "encoder_layerdrop": 0.0,
-   "encoder_layers": 12,
-   "eos_token_id": 102,
-   "forced_eos_token_id": 102,
-   "gradient_checkpointing": false,
-   "id2label": {
-     "0": "LABEL_0",
-     "1": "LABEL_1",
-     "2": "LABEL_2"
-   },
-   "init_std": 0.02,
-   "is_encoder_decoder": true,
-   "label2id": {
-     "LABEL_0": 0,
-     "LABEL_1": 1,
-     "LABEL_2": 2
-   },
-   "max_position_embeddings": 1024,
-   "model_type": "bart",
-   "no_repeat_ngram_size": 3,
-   "normalize_before": false,
-   "normalize_embedding": true,
-   "num_beams": 4,
-   "num_hidden_layers": 12,
-   "pad_token_id": 0,
-   "scale_embedding": false,
-   "task_specific_params": {
-     "summarization": {
-       "length_penalty": 1.0,
-       "max_length": 128,
-       "min_length": 12,
-       "num_beams": 4
-     },
-     "summarization_cnn": {
-       "length_penalty": 2.0,
-       "max_length": 142,
-       "min_length": 56,
-       "num_beams": 4
-     },
-     "summarization_xsum": {
-       "length_penalty": 1.0,
-       "max_length": 62,
-       "min_length": 11,
-       "num_beams": 6
-     }
-   },
-   "tokenizer_class": "BertTokenizer",
-   "torch_dtype": "float32",
-   "transformers_version": "4.41.1",
-   "use_cache": true,
-   "vocab_size": 51271
- }
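For reference, the deleted config describes a 12-layer encoder/decoder BART with d_model 1024 and BERT-style special-token IDs. A minimal sketch, assuming the `transformers` library, of rebuilding an equivalent `BartConfig` in Python; the values are copied directly from the diff above:

```python
from transformers import BartConfig

# Key hyperparameters copied from the deleted best/config.json above.
config = BartConfig(
    vocab_size=51271,
    d_model=1024,
    encoder_layers=12,
    decoder_layers=12,
    encoder_attention_heads=16,
    decoder_attention_heads=16,
    encoder_ffn_dim=4096,
    decoder_ffn_dim=4096,
    max_position_embeddings=1024,
    bos_token_id=101,            # [CLS] in the BERT-style vocab
    eos_token_id=102,            # [SEP]
    pad_token_id=0,              # [PAD]
    decoder_start_token_id=102,
    forced_eos_token_id=102,
)
print(config.model_type)  # "bart"
```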
 
best/generation_config.json DELETED
@@ -1,12 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 101,
-   "decoder_start_token_id": 102,
-   "early_stopping": true,
-   "eos_token_id": 102,
-   "forced_eos_token_id": 102,
-   "no_repeat_ngram_size": 3,
-   "num_beams": 4,
-   "pad_token_id": 0,
-   "transformers_version": "4.41.1"
- }
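This generation config mirrors the beam-search defaults already present in config.json. A minimal sketch, assuming `transformers` >= 4.25 (where `GenerationConfig` was introduced), of expressing the same defaults programmatically:

```python
from transformers import GenerationConfig

# Generation defaults copied from the deleted best/generation_config.json.
gen_config = GenerationConfig(
    bos_token_id=101,
    decoder_start_token_id=102,
    early_stopping=True,
    eos_token_id=102,
    forced_eos_token_id=102,
    no_repeat_ngram_size=3,
    num_beams=4,
    pad_token_id=0,
)
gen_config.save_pretrained("best")  # would recreate best/generation_config.json
```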
 
best/model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a3f4fc87458ae4e44f7939e39f0bdfed9c86d7e14d3efe191ae3b312c46b0537
- size 1629551596
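The safetensors entry is a Git LFS pointer: the ~1.6 GB of weights live in LFS storage, keyed by the SHA-256 above. A small sketch of parsing such a pointer file; `parse_lfs_pointer` is an illustrative helper, not part of any library:

```python
def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:a3f4fc87458ae4e44f7939e39f0bdfed9c86d7e14d3efe191ae3b312c46b0537
size 1629551596"""

info = parse_lfs_pointer(pointer)
print(info["size"])  # 1629551596 bytes, roughly 1.6 GB of float32 weights
```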
 
best/special_tokens_map.json DELETED
@@ -1,9 +0,0 @@
- {
-   "bos_token": "[CLS]",
-   "cls_token": "[CLS]",
-   "eos_token": "[EOS]",
-   "mask_token": "[MASK]",
-   "pad_token": "[PAD]",
-   "sep_token": "[SEP]",
-   "unk_token": "[UNK]"
- }
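This map remaps BART's usual `<s>`/`</s>` conventions onto BERT-style tokens, with a dedicated `[EOS]` token for eos rather than reusing `[SEP]`. A minimal sketch of reading it back, assuming a local copy of the deleted file (the path is illustrative):

```python
import json

# Assumes a local copy of the deleted file; path is illustrative.
with open("best/special_tokens_map.json", encoding="utf-8") as f:
    special_tokens = json.load(f)

print(special_tokens["bos_token"])  # "[CLS]" (shared with cls_token)
print(special_tokens["eos_token"])  # "[EOS]"
```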
 
best/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
best/tokenizer_config.json DELETED
@@ -1,67 +0,0 @@
- {
-   "added_tokens_decoder": {
-     "0": {
-       "content": "[PAD]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100": {
-       "content": "[UNK]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "101": {
-       "content": "[CLS]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "102": {
-       "content": "[SEP]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "103": {
-       "content": "[MASK]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "104": {
-       "content": "[EOS]",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "[CLS]",
-   "clean_up_tokenization_spaces": true,
-   "cls_token": "[CLS]",
-   "do_basic_tokenize": true,
-   "do_lower_case": false,
-   "eos_token": "[EOS]",
-   "mask_token": "[MASK]",
-   "model_max_length": 1000000000000000019884624838656,
-   "never_split": null,
-   "pad_token": "[PAD]",
-   "sep_token": "[SEP]",
-   "strip_accents": null,
-   "tokenize_chinese_chars": true,
-   "tokenizer_class": "BertTokenizer",
-   "unk_token": "[UNK]"
- }
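The tokenizer config pins the special tokens to fixed IDs ([PAD]=0, [UNK]=100, [CLS]=101, [SEP]=102, [MASK]=103, [EOS]=104), consistent with the bos/eos/pad IDs in config.json. A minimal sketch, assuming `transformers` is installed, that rebuilds a toy vocab with those positions and instantiates a matching `BertTokenizer`; the filler tokens are placeholders, not the real 51,271-entry vocabulary:

```python
from transformers import BertTokenizer

# Toy vocab: the real vocab.txt has 51271 entries; we only pin the special-token IDs.
vocab = [f"[unused{i}]" for i in range(105)]
vocab[0] = "[PAD]"
vocab[100] = "[UNK]"
vocab[101] = "[CLS]"
vocab[102] = "[SEP]"
vocab[103] = "[MASK]"
vocab[104] = "[EOS]"

with open("vocab.txt", "w", encoding="utf-8") as f:
    f.write("\n".join(vocab))

tokenizer = BertTokenizer(
    "vocab.txt",
    do_lower_case=False,
    tokenize_chinese_chars=True,
    bos_token="[CLS]",
    eos_token="[EOS]",  # extra special tokens pass through SpecialTokensMixin
)
print(tokenizer.convert_tokens_to_ids(["[CLS]", "[SEP]", "[EOS]"]))  # [101, 102, 104]
```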
 
best/vocab.txt DELETED
The diff for this file is too large to render. See raw diff
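For completeness: vocab.txt is a plain one-token-per-line BERT-style vocabulary, whose line count should equal the vocab_size of 51271 declared in config.json. A quick consistency check, assuming a local copy of the deleted file:

```python
# Assumes a local copy of the deleted file; path is illustrative.
with open("best/vocab.txt", encoding="utf-8") as f:
    tokens = f.read().splitlines()

assert len(tokens) == 51271, "line count should match config.json's vocab_size"
print(tokens[101], tokens[102], tokens[104])  # expected: [CLS] [SEP] [EOS]
```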