aengusl committed
Commit df0bf71
1 Parent(s): 43d0ed9

Model save

adapter_config.json CHANGED
@@ -20,11 +20,11 @@
   "revision": null,
   "target_modules": [
     "q_proj",
-    "k_proj",
     "v_proj",
-    "o_proj",
     "up_proj",
-    "down_proj"
+    "down_proj",
+    "k_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
runs/May17_12-39-09_c842148f8542/events.out.tfevents.1715949643.c842148f8542.296765.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fdd9dbcd721a0e9824251879d6a7c2734b4193d46e06c3a792f11ee5023486b0
+size 4308
step_0/adapter_config.json CHANGED
@@ -20,11 +20,11 @@
   "revision": null,
   "target_modules": [
     "q_proj",
-    "k_proj",
     "v_proj",
-    "o_proj",
     "up_proj",
-    "down_proj"
+    "down_proj",
+    "k_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
step_0/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f79b6f5bfba4a790c423783b8a5e19f185cb5315ac152b80bd360821cf6c0fd8
+oid sha256:29fc5a7885616816fa23f850350a0c202a3859660fc64f78e9b04eb56f918849
 size 5560
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f79b6f5bfba4a790c423783b8a5e19f185cb5315ac152b80bd360821cf6c0fd8
+oid sha256:29fc5a7885616816fa23f850350a0c202a3859660fc64f78e9b04eb56f918849
 size 5560
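
The remaining diffs only update Git LFS pointers: training_args.bin keeps its size (5560 bytes) but gets a new content hash, and a new TensorBoard event file is added. A minimal sketch of loading the adapter saved at this commit, assuming PEFT and Transformers; the repository and base-model ids are placeholders, since neither appears on this page:

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Placeholder ids -- the actual base model and adapter repo are not shown here.
base = AutoModelForCausalLM.from_pretrained("base-model-id")
model = PeftModel.from_pretrained(
    base,
    "aengusl/adapter-repo",  # placeholder repo id
    revision="df0bf71",      # pin to this commit
)
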