aengusl committed on
Commit
3d04942
1 Parent(s): a349651

Model save

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:93696b7b91cc0fda26ed17de62e819a016a7ed5302e3f1cf3c140a90735121a4
+oid sha256:db597d3628cf503865af052a582d381a99d6bc024f439e1181e203d9f15fcd45
 size 258001832
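
A minimal sketch (not part of this repository) of how a Git LFS pointer file like the one above can be read: pointer files contain only the "version", "oid sha256:<hash>", and "size" fields, while the actual weights live in LFS storage. The file path used here is hypothetical.

```python
from pathlib import Path

def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer file into a {key: value} dict."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# Hypothetical local path to a pointer file checked out without `git lfs pull`.
pointer = parse_lfs_pointer("adapter_model.safetensors")
print(pointer["oid"], pointer["size"])  # e.g. sha256:db597d... 258001832
```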
runs/Jun13_19-09-54_dccc3444dd3c/events.out.tfevents.1718305870.dccc3444dd3c.137614.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:117c7227caac2aa8ac18c3b186ed789f469b6ee58369ab7f5e8943c09a6b3c29
-size 17401
+oid sha256:cb0f024ab59fd2b37aab2d6af2e82df9208e920bceba8801767ff60a1c07a4ea
+size 20481
step_100/adapter_config.json CHANGED
@@ -16,12 +16,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
+    "down_proj",
     "v_proj",
-    "q_proj",
     "k_proj",
-    "o_proj",
-    "down_proj"
+    "q_proj",
+    "up_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
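
A minimal sketch (assumed, not taken from this repository's training code) of how the updated target_modules list in step_100/adapter_config.json could be expressed with peft's LoraConfig. Only the module names and task type come from the diff above; rank, alpha, and dropout are placeholder values.

```python
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,               # placeholder rank, not read from the config diff
    lora_alpha=32,      # placeholder scaling factor, not read from the config diff
    lora_dropout=0.0,   # placeholder dropout, not read from the config diff
    target_modules=["down_proj", "v_proj", "k_proj", "q_proj", "up_proj", "o_proj"],
    task_type="CAUSAL_LM",
)
```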
step_100/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6d8244d9789b2fed4d1c31010b6d822e0bbbb4b0441a08a666c07a6fc5da2caf
+oid sha256:7d3ea0c1bf5bc74087a236372dbb16c86e34a912a498768d544c2d3e1dce35a0
 size 258001832
step_100/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:141a3bbe872c69bee25c29bea550a0a65e47e19272018c2d758e0bf4fd444e54
+oid sha256:30d0ae3dfd32b6142d983e67ef62dec8157f29777e7f12c400f04900af76ecfa
 size 6200