aengusl committed
Commit f191e8e
1 Parent(s): 9379baf

Model save

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cd0cf34a103b0d4255baef0304a07aaab862da0590fe3e2f5efcd1da17bfcad4
+oid sha256:6d217dfd5cf4ba158db6ebcd8e997f8f9cf06c3e6c21152059a1364c8bf75801
 size 258001832
runs/May28_17-47-07_e884ad0a7538/events.out.tfevents.1716918611.e884ad0a7538.6113.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:452a49086e99d014ba7d5d134c268eccd8b07414a1abeacf3503c08e6bb028b1
-size 7680
+oid sha256:6590f90f97a19f4dbb3a995ef46b4ebb34d1bc2f0882dc658349fabcab18aacc
+size 8296
step_400/adapter_config.json CHANGED
@@ -16,12 +16,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "o_proj",
+    "k_proj",
+    "down_proj",
     "q_proj",
+    "v_proj",
     "up_proj",
-    "down_proj",
-    "k_proj"
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
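
Note that the target_modules change above is only a reordering: the same six projection modules (q_proj, k_proj, v_proj, o_proj, up_proj, down_proj) are adapted before and after. A minimal PEFT sketch of an equivalent adapter configuration is shown below; the rank, alpha, and dropout values are placeholders, since this diff does not show them.

from peft import LoraConfig

# Minimal sketch of an equivalent LoRA adapter configuration.
# target_modules and task_type match step_400/adapter_config.json;
# r, lora_alpha, and lora_dropout are illustrative placeholders only.
config = LoraConfig(
    r=16,               # placeholder rank, not shown in this diff
    lora_alpha=32,      # placeholder scaling factor, not shown in this diff
    lora_dropout=0.05,  # placeholder dropout, not shown in this diff
    target_modules=["k_proj", "down_proj", "q_proj", "v_proj", "up_proj", "o_proj"],
    task_type="CAUSAL_LM",
)
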
step_400/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:75ba743e3492b44fe736fa8c1361aaeca7742eeb9ee39ebdfcdc49a96108d806
+oid sha256:6d217dfd5cf4ba158db6ebcd8e997f8f9cf06c3e6c21152059a1364c8bf75801
 size 258001832
step_400/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db504482b8901fd2018de26c6537464d22287182e13e3fb323c734c10d5304e4
+oid sha256:1d94e2cb4cc9df520c5534db58255777cb00244c8f4c67c94b8014d40f0b0d77
 size 6200