aengusl committed
Commit 1631443
1 parent: caf9e39

Model save

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e70af373322117061fc971d234e226f7df18f2b11378ba1864c2cd137a6ebbed
+oid sha256:4dd09953751de03d88be83328ab731dc61ac8d1e571a73b8dfbe5af346dd8764
 size 258001832
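
This hunk only rewrites the Git LFS pointer: the adapter file keeps the same 258,001,832-byte size, but its contents, and therefore the sha256 oid, changed. A minimal Python sketch for checking a downloaded copy against the new pointer; the local path and variable names are illustrative, not part of the repository:

import hashlib

POINTER_OID = "4dd09953751de03d88be83328ab731dc61ac8d1e571a73b8dfbe5af346dd8764"
FILE_PATH = "adapter_model.safetensors"  # adjust to your local checkout

def sha256_of(path, chunk_size=1 << 20):
    # Stream in 1 MiB chunks so the 258 MB checkpoint never sits fully in memory.
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

assert sha256_of(FILE_PATH) == POINTER_OID, "file does not match the LFS pointer oid"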
runs/May28_17-47-07_e884ad0a7538/events.out.tfevents.1716918611.e884ad0a7538.6113.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c61f7ca14f163dccea5eca7d5363608188e3f1ace3022b16f6ed41562ee6e594
-size 13933
+oid sha256:f1452d5a6ee04bcf0da6629bc3869f47dd8cb05d3d8178760012a051188ab399
+size 14561
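
The events.out.tfevents file is the TensorBoard log for this run; its size grows from 13,933 to 14,561 bytes as new training events are appended. A hedged sketch for inspecting it with TensorBoard's event reader; the "train/loss" tag is an assumption, so print the available tags first to see what this run actually logged:

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Path copied from the commit; the "train/loss" tag below is an assumption.
acc = EventAccumulator(
    "runs/May28_17-47-07_e884ad0a7538/events.out.tfevents.1716918611.e884ad0a7538.6113.0"
)
acc.Reload()

print(acc.Tags()["scalars"])          # scalar tags actually present in this log
for event in acc.Scalars("train/loss"):
    print(event.step, event.value)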
step_1200/adapter_config.json CHANGED
@@ -16,12 +16,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "o_proj",
+    "k_proj",
+    "down_proj",
     "q_proj",
+    "v_proj",
     "up_proj",
-    "down_proj",
-    "k_proj"
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
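
The only change to step_1200/adapter_config.json is the ordering of target_modules: the same six projection layers (q_proj, k_proj, v_proj, o_proj, up_proj, down_proj) are targeted before and after, and PEFT treats the list as an unordered set of module names, so the reorder does not alter the adapter. A rough sketch of an equivalent PEFT LoraConfig; the rank and alpha values are placeholders, since this hunk shows only lines 16-27 of the file:

from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                     # placeholder, not visible in this diff
    lora_alpha=32,            # placeholder, not visible in this diff
    target_modules=["k_proj", "down_proj", "q_proj", "v_proj", "up_proj", "o_proj"],
    task_type="CAUSAL_LM",    # taken directly from the diff
)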
step_1200/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a75a6eb7d6dbc326f8d6fd5957491f69a6615f9c061ef31693274894a8a861cf
+oid sha256:4dd09953751de03d88be83328ab731dc61ac8d1e571a73b8dfbe5af346dd8764
 size 258001832
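
The new step_1200 oid matches the top-level adapter_model.safetensors above, so the checkpoint and the latest adapter now carry identical weights. A hedged sketch for loading this checkpoint with PEFT; the base model name is an assumption and should be read from base_model_name_or_path in step_1200/adapter_config.json:

import torch
from peft import PeftModel
from transformers import AutoModelForCausalLM

BASE_MODEL = "base-model-name"  # assumption: take the real value from the adapter config

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL, torch_dtype=torch.bfloat16)
model = PeftModel.from_pretrained(base, "step_1200")  # reads adapter_config.json + adapter_model.safetensors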
step_1200/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db504482b8901fd2018de26c6537464d22287182e13e3fb323c734c10d5304e4
+oid sha256:1d94e2cb4cc9df520c5534db58255777cb00244c8f4c67c94b8014d40f0b0d77
 size 6200
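
training_args.bin is the pickled TrainingArguments object that the Hugging Face Trainer saves next to each checkpoint; the new oid with an unchanged 6,200-byte size points to a small change to the arguments, or simply a re-save. A hedged sketch for inspecting it; the printed attribute names are standard TrainingArguments fields, and whether they were customized for this run is not visible here:

import torch

# training_args.bin is a pickled transformers.TrainingArguments object, so
# transformers must be installed for unpickling to resolve the class.
# Newer torch versions default torch.load to weights_only=True, which rejects
# arbitrary pickles, hence weights_only=False (only use on files you trust).
args = torch.load("step_1200/training_args.bin", weights_only=False)
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)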