Ying7888 committed on
Commit 6bb6880
1 Parent(s): ca39981

Training in progress, step 500

README.md CHANGED
@@ -1,6 +1,6 @@
 ---
-library_name: transformers
-tags: []
+library_name: peft
+base_model: patrickvonplaten/bert2bert_cnn_daily_mail
 ---
 
 # Model Card for Model ID
@@ -15,7 +15,7 @@ tags: []
 
 <!-- Provide a longer summary of what this model is. -->
 
-This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated.
+
 
 - **Developed by:** [More Information Needed]
 - **Funded by [optional]:** [More Information Needed]
@@ -199,3 +199,6 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
 [More Information Needed]
 
 
+### Framework versions
+
+- PEFT 0.10.1.dev0
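
The updated front matter switches the declared library to PEFT, and the new "Framework versions" section pins the adapter to PEFT 0.10.1.dev0. Note that the README metadata still names patrickvonplaten/bert2bert_cnn_daily_mail as the base model, while adapter_config.json below now points at facebook/bart-base. A minimal loading sketch, assuming the bart-base value in adapter_config.json is authoritative and using a hypothetical repo id:

# Minimal loading sketch (hypothetical repo id; PEFT resolves the real base
# model from adapter_config.json's base_model_name_or_path).
from transformers import AutoModelForSeq2SeqLM
from peft import PeftModel

base = AutoModelForSeq2SeqLM.from_pretrained("facebook/bart-base")
model = PeftModel.from_pretrained(base, "Ying7888/bart-base-lora")  # hypothetical repo id
model.eval()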
adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "patrickvonplaten/bert2bert_cnn_daily_mail",
+  "base_model_name_or_path": "facebook/bart-base",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -16,12 +16,12 @@
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 16,
+  "r": 4,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "value",
-    "query"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "SEQ_2_SEQ_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:12e25b9a7a393ac18c9e176f76fb621be2e6733cb85439a7b5eb219ff4286691
-size 7099360
+oid sha256:c4b4a80d18fc6dcaa92e33648fdd85a5f4940cab1a819188105c221daaa4bb89
+size 894624
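
The adapter checkpoint shrinks from about 7.1 MB to about 0.9 MB, which is consistent with the config change above. Assuming fp32 weights and bart-base's geometry (hidden size 768; q_proj and v_proj in 6 encoder self-attention, 6 decoder self-attention, and 6 decoder cross-attention blocks), a rough check:

# Rough size check for the new adapter (assumes fp32 and bart-base geometry).
hidden, r = 768, 4
matrices = (6 + 6 + 6) * 2          # q_proj and v_proj per attention block
params = matrices * 2 * r * hidden  # LoRA A (r x d) plus B (d x r) per matrix
print(params, params * 4)           # 221184 params, 884736 bytes
# ~884 kB of weights plus the safetensors header is close to the 894624 bytes on disk.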
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
runs/Apr13_17-03-32_89a7766e7bdb/events.out.tfevents.1713027835.89a7766e7bdb.1873.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f6d269e0c1d8450961ffe1c2d9f83786c3aff0ccdcadffc835deaf80eac43e96
-size 11439
+oid sha256:2b06d3ffaca2058e702bf99f1b4ec58b9e8ee6f952d8c1c1cc01810a15d64c98
+size 11793
runs/Apr13_17-03-32_89a7766e7bdb/events.out.tfevents.1713030946.89a7766e7bdb.1873.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e7b312fe1009f653dce48fcd163aeb0d0f14c563efb510ee810bba9e83d92a5a
+size 359
runs/Apr13_18-15-34_89a7766e7bdb/events.out.tfevents.1713032135.89a7766e7bdb.1873.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f4ca1ae0ad280af71c67ff4ff67305ff9f313393f6f075e910fdddc2bcc7d90e
+size 5854
runs/Apr13_18-16-06_89a7766e7bdb/events.out.tfevents.1713032171.89a7766e7bdb.1873.3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4dfc4371868898024a447a316dd666f14090008aa66da37b26e5a01e5fab820c
+size 6336
special_tokens_map.json CHANGED
@@ -1,9 +1,15 @@
 {
-  "bos_token": "[CLS]",
-  "cls_token": "[CLS]",
-  "eos_token": "[SEP]",
-  "mask_token": "[MASK]",
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "unk_token": "[UNK]"
+  "bos_token": "<s>",
+  "cls_token": "<s>",
+  "eos_token": "</s>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "unk_token": "<unk>"
 }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,60 +1,58 @@
 {
+  "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
-      "content": "[PAD]",
+      "content": "<s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "100": {
-      "content": "[UNK]",
+    "1": {
+      "content": "<pad>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "101": {
-      "content": "[CLS]",
+    "2": {
+      "content": "</s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "102": {
-      "content": "[SEP]",
+    "3": {
+      "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     },
-    "103": {
-      "content": "[MASK]",
-      "lstrip": false,
-      "normalized": false,
+    "50264": {
+      "content": "<mask>",
+      "lstrip": true,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "[CLS]",
+  "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
-  "cls_token": "[CLS]",
+  "cls_token": "<s>",
   "device_map": "auto",
-  "do_basic_tokenize": true,
-  "do_lower_case": true,
-  "eos_token": "[SEP]",
-  "mask_token": "[MASK]",
-  "model_max_length": 512,
-  "never_split": null,
-  "pad_token": "[PAD]",
-  "sep_token": "[SEP]",
-  "strip_accents": null,
-  "tokenize_chinese_chars": true,
-  "tokenizer_class": "BertTokenizer",
-  "unk_token": "[UNK]"
+  "eos_token": "</s>",
+  "errors": "replace",
+  "mask_token": "<mask>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<pad>",
+  "sep_token": "</s>",
+  "tokenizer_class": "BartTokenizer",
+  "trim_offsets": true,
+  "unk_token": "<unk>"
 }
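
Together with special_tokens_map.json, merges.txt, and vocab.json, this swaps the BERT WordPiece tokenizer for BART's byte-level BPE tokenizer (<s>/</s>/<mask> instead of [CLS]/[SEP]/[MASK], and an effectively unbounded model_max_length). A quick sketch of the resulting special tokens, assuming the files match the stock facebook/bart-base tokenizer:

# Sketch: the special tokens after the switch (assumes stock facebook/bart-base files).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("facebook/bart-base")
print(tok.bos_token, tok.eos_token, tok.mask_token)  # <s> </s> <mask>
print(tok.mask_token_id)                             # 50264, matching added_tokens_decoder
print(tok("hello world").input_ids)                  # starts with <s> (0), ends with </s> (2)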
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7a8e39932266814530278f72b966e6ff504e81438b48e31aecb4ece5603c1699
+oid sha256:36c644b433f0c0e785d3bc10f899fc0ad9f7057eaf6e9012c85373f7af903cd9
 size 5112
vocab.json ADDED
The diff for this file is too large to render. See raw diff