EddyGiusepe committed
Commit c944c49
1 Parent(s): ce38e79

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -9,7 +9,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 16,
+  "lora_alpha": 512,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
@@ -19,8 +19,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
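This hunk raises lora_alpha from 16 to 512 and reorders the two target modules; the reordering does not change which projections are adapted, but the alpha change matters because PEFT scales the LoRA update by lora_alpha / r. Below is a minimal sketch of the peft LoraConfig that would serialize to these fields; the rank r and the base model are not visible in this hunk, so they are placeholders.

from peft import LoraConfig, TaskType

# Values taken from the diff above; `r` is a placeholder because the rank
# is not part of this hunk.
lora_config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    lora_alpha=512,                        # was 16 before this commit
    lora_dropout=0.05,
    target_modules=["v_proj", "q_proj"],
    r=16,                                  # assumption: not shown in the diff
)

# lora_config.save_pretrained("adapter_out")  # writes an adapter_config.json
# containing the keys shown in the hunk above.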
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e8575fe81eafd61a38399ecbaede8bafcb84cfd25fc5b5bd8b455e11def811d0
+oid sha256:80da184aba1b9a70e4d8dfb6cb34945b7d7de2e68a5ac3ca7f965bb81832a17e
 size 144191184
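Only the git-lfs pointer changed here: the new adapter weights have a different sha256 oid but the same byte size (144,191,184). A small sketch, assuming the resolved file (not the pointer) has been downloaded locally, that checks a copy of adapter_model.safetensors against the pointer's oid and size:

import hashlib

EXPECTED_OID = "80da184aba1b9a70e4d8dfb6cb34945b7d7de2e68a5ac3ca7f965bb81832a17e"
EXPECTED_SIZE = 144191184

def verify_lfs_object(path: str) -> bool:
    # Stream the file, hashing and counting bytes as we go.
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == EXPECTED_OID and size == EXPECTED_SIZE

# verify_lfs_object("adapter_model.safetensors")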
runs/Jan27_23-45-38_c42946757f0b/events.out.tfevents.1706399163.c42946757f0b.15107.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d68d544edf3da3d98f3a24585503e42bcc6b9d87a966c1ad4b0d957eae81f60b
+size 20707
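The newly added file is a git-lfs pointer to a TensorBoard event log written during this epoch. A sketch of reading its scalars with TensorBoard's EventAccumulator, assuming the tensorboard package is installed and the resolved event file is on disk; the scalar tag names depend on the Trainer run and are not visible in this diff:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

path = "runs/Jan27_23-45-38_c42946757f0b/events.out.tfevents.1706399163.c42946757f0b.15107.0"
ea = EventAccumulator(path)
ea.Reload()                          # parse the event file

for tag in ea.Tags()["scalars"]:     # e.g. training loss / epoch tags
    for event in ea.Scalars(tag):
        print(tag, event.step, event.value)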
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:af53a752bcc8a914c151e58d9eefe9e7f102d67868ae8b3712175192855a2707
+oid sha256:876879dacbb8408d3c01c7f53b4531b0197d48f265273d27d1324029786e99a2
 size 4664
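training_args.bin keeps the same size (4664 bytes); only its oid changes. The Hugging Face Trainer writes this file with torch.save, so its contents can be inspected as in the sketch below; the attribute names are standard TrainingArguments fields, and the actual values are not recoverable from this diff.

import torch

# training_args.bin is a pickled transformers.TrainingArguments object,
# so weights_only must stay False (it is not a plain tensor checkpoint).
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)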