hansa15100 committed on
Commit 72ac17c
1 Parent(s): c55c762

Training in progress, step 1

adapter_config.json CHANGED
@@ -10,13 +10,13 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 8,
+  "lora_alpha": 16,
   "lora_dropout": 0.0,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 8,
+  "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0df49fbc7b20b3c4b0aa2df17fb14dbd7d61efbc16dfbdc803d7be8f11dcbaeb
-size 45258384
+oid sha256:3b4cc4828018d5135610d4d8e1a1283a0e674616d100dd3c916b0805dd8c028a
+size 90454136
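
The adapter weights roughly double in size (45,258,384 → 90,454,136 bytes), consistent with the LoRA parameter count scaling linearly in r as it goes from 8 to 16. One way to confirm the new rank locally, assuming the file has been pulled from LFS to the path shown:

from safetensors import safe_open

# Inspect the lora_A matrices: their first dimension is the rank r,
# so it should now read 16 rather than 8 (local path assumed).
with safe_open("adapter_model.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        if "lora_A" in name:
            print(name, f.get_slice(name).get_shape())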
runs/Jun24_12-40-04_aid-iad-bm-gpu4-8-ad1-002/events.out.tfevents.1719232811.aid-iad-bm-gpu4-8-ad1-002.1304290.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4088a839ab3f90e137c665b25933e26c0f44edd725ec3cd0763005b2a3a288ca
+size 5673
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ff6f3742646f4fedfdb3f22968f02ecffd8d57640c096ba0debd3d90715a3910
+oid sha256:3273681faa0ee376ae682752f7a607bb6f6a450a39f28a99014bac918e364677
 size 5112
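
training_args.bin only changes its hash here (the size stays at 5112 bytes); it is the serialized TrainingArguments object that transformers' Trainer stores next to checkpoints. A quick, assumption-laden way to inspect it locally (paths and fields below are illustrative):

import torch

# Loads the pickled TrainingArguments saved by the Trainer; recent torch
# versions require weights_only=False to unpickle arbitrary objects.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)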