aengusl committed
Commit d47a9f3
1 Parent(s): 8f3cbd9

Model save

adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9fcfb19018d90f8f4bc50f1ba0d9525982b413b86dac0f124e50fb89eefd9036
+oid sha256:f272268e56a0e4569fb78b72b82a3d1640b85d1a506cdc4ca00fae7474ec4318
 size 258001832
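
Each binary file in this commit is stored through Git LFS, so the diff above only touches the three-line pointer stub (spec version, SHA-256 oid, byte size), not the weights themselves. A minimal sketch of checking a locally downloaded blob against the pointer's oid; the local path is an assumption, substitute wherever Git LFS or huggingface_hub placed the file:

import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    # Stream the file through SHA-256 so a large checkpoint is never fully in memory.
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Hypothetical local path; the expected oid is the one from the new pointer above.
blob = Path("adapter_model.safetensors")
expected = "f272268e56a0e4569fb78b72b82a3d1640b85d1a506cdc4ca00fae7474ec4318"
assert sha256_of(blob) == expected, "downloaded adapter does not match the LFS pointer"
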
runs/Jun13_19-09-54_dccc3444dd3c/events.out.tfevents.1718305870.dccc3444dd3c.137614.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:750a329bb03463502c6e955772a952755a638153b6f626326f9a24d9d78babdf
-size 80060
+oid sha256:a2e58224a36ef83c11d3e91175a0d2b425bad80fcb6c1e57501e85352388afaa
+size 83200
step_500/adapter_config.json CHANGED
@@ -16,12 +16,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
+    "down_proj",
     "v_proj",
-    "q_proj",
     "k_proj",
-    "o_proj",
-    "down_proj"
+    "q_proj",
+    "up_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
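
The old and new target_modules lists contain the same six projection modules; only their order differs, which is consistent with PEFT storing target_modules as a set and serializing it in whatever order the set iterates on a given run. A minimal sketch (not this repo's training script) of a LoraConfig targeting those modules; the rank and alpha values below are illustrative assumptions:

from peft import LoraConfig

lora_config = LoraConfig(
    r=16,            # assumed rank; the real value lives elsewhere in adapter_config.json
    lora_alpha=32,   # assumed scaling factor
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj", "up_proj", "down_proj"],
    task_type="CAUSAL_LM",
)
# Because target_modules is treated as a set, the saved JSON can list the same
# modules in a different order between runs -- which is all this diff shows.
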
step_500/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:496430c326b6692448255ffe4af54d3706c2318374acbf5244444f3f24c74010
+oid sha256:f69cf18bc3151907391d42ff9a8f93c767cfc96016daa6ed5ab9bd37299cc966
 size 258001832
step_500/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:141a3bbe872c69bee25c29bea550a0a65e47e19272018c2d758e0bf4fd444e54
+oid sha256:30d0ae3dfd32b6142d983e67ef62dec8157f29777e7f12c400f04900af76ecfa
 size 6200
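
The commit updates both the top-level adapter and the step_500/ checkpoint (adapter_config.json, adapter_model.safetensors, training_args.bin). A minimal loading sketch, not taken from this repo's code, assuming a local checkout of the repository and a base model identifier that is only a placeholder for whatever model the adapter was trained against:

from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("base-model-id")  # placeholder, not from this repo
# Point at either the repo root (latest adapter) or the step_500/ checkpoint directory.
model = PeftModel.from_pretrained(base, "step_500")
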