nicccMnc committed on
Commit e765287
1 Parent(s): 140c31d

End of training

README.md CHANGED
@@ -2,8 +2,6 @@
  license: apache-2.0
  library_name: peft
  tags:
- - trl
- - sft
  - generated_from_trainer
  base_model: vilsonrodrigues/falcon-7b-instruct-sharded
  model-index:
@@ -36,15 +34,15 @@ More information needed
 
  The following hyperparameters were used during training:
  - learning_rate: 0.0002
- - train_batch_size: 16
+ - train_batch_size: 1
  - eval_batch_size: 8
  - seed: 42
  - gradient_accumulation_steps: 4
- - total_train_batch_size: 64
+ - total_train_batch_size: 4
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
- - lr_scheduler_warmup_ratio: 0.03
- - training_steps: 180
+ - lr_scheduler_warmup_ratio: 0.05
+ - num_epochs: 10
  - mixed_precision_training: Native AMP
 
  ### Training results
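For reference, a minimal sketch of how the updated hyperparameters above would map onto a Hugging Face `TrainingArguments` object. The `output_dir` value and the choice of `fp16` for the "Native AMP" mixed precision are assumptions, not taken from the commit; the other fields mirror the README, and the effective batch size works out to per-device batch 1 × gradient accumulation 4 = 4.

```python
from transformers import TrainingArguments

# Sketch only: fields mirror the updated README hyperparameters.
training_args = TrainingArguments(
    output_dir="falcon-7b-instruct-ft",  # placeholder, not from the commit
    learning_rate=2e-4,
    per_device_train_batch_size=1,       # README: train_batch_size: 1
    per_device_eval_batch_size=8,        # README: eval_batch_size: 8
    gradient_accumulation_steps=4,       # effective batch = 1 * 4 = 4
    lr_scheduler_type="cosine",
    warmup_ratio=0.05,                   # README: lr_scheduler_warmup_ratio
    num_train_epochs=10,                 # README: num_epochs
    seed=42,
    fp16=True,                           # assumption for "Native AMP" mixed precision
)
```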
adapter_config.json CHANGED
@@ -15,14 +15,11 @@
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
- "r": 32,
+ "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
- "dense_h_to_4h",
- "query_key_value",
- "dense",
- "dense_4h_to_h"
+ "query_key_value"
  ],
  "task_type": "CAUSAL_LM",
  "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:86055f23f572df48893b4b5e540fd12062763ec5aa10b09e786c6fb99e1d8217
- size 261131840
+ oid sha256:3c941424e554bc50c9bceb7f9ee5d873bd619a4565fb7a6d9f127ce97f4cbc8f
+ size 18883912
runs/Feb21_08-55-21_512faa0505b2/events.out.tfevents.1708505722.512faa0505b2.5658.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a880b88705358113bac8359738dcfd3af20e4e0e9c6c04e5bfd1177cc110270
+ size 12468
runs/Feb21_08-57-40_512faa0505b2/events.out.tfevents.1708505861.512faa0505b2.5658.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7029a668708f6a855cde142f52c03e65cb7201ebab94f047b9aa68ce100821f2
+ size 37459
runs/Feb21_09-05-59_512faa0505b2/events.out.tfevents.1708506359.512faa0505b2.5658.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db3fb15d3799f88c872469f10c971552d22e8e0ec6a4481a5d3d1df415ef6234
+ size 72982
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:27b4fe67526c2ab95e4dc03472a71781455227508a77bc5eef51bc3f27ee6b59
+ oid sha256:de9d00e9d163f1d242baf776dbaca2ea6a80ee779389307f42d889e63c5d4cbe
  size 4728
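A hedged sketch of loading this commit's adapter on top of the base model named in the README. The adapter path is a placeholder for this repository (or a local checkout of the commit), since the repo id is not stated in the diff, and `trust_remote_code=True` is included only in case the sharded Falcon base model ships custom modeling code.

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Placeholder adapter location: the commit's repo id is not stated in the diff.
ADAPTER_PATH = "path/to/this-adapter-repo"

base = AutoModelForCausalLM.from_pretrained(
    "vilsonrodrigues/falcon-7b-instruct-sharded",
    torch_dtype=torch.float16,
    device_map="auto",
    trust_remote_code=True,  # assumption: may be needed for the Falcon base model
)
tokenizer = AutoTokenizer.from_pretrained("vilsonrodrigues/falcon-7b-instruct-sharded")

# Attach the LoRA adapter produced by this training run.
model = PeftModel.from_pretrained(base, ADAPTER_PATH)
model.eval()
```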