vxbrandon committed
Commit ab98f6c
1 Parent(s): f23aac5

Training in progress, step 1002

adapter_config.json CHANGED
@@ -21,9 +21,9 @@
   "target_modules": [
     "q_proj",
     "gate_proj",
-    "v_proj",
     "up_proj",
-    "down_proj"
+    "down_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
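
For context, adapter_config.json is the PEFT adapter configuration, and a target_modules list like the one above is typically produced by a peft.LoraConfig. The sketch below is illustrative only: the rank (r) and lora_alpha values are placeholders, since this diff shows neither.

# Hypothetical sketch: a PEFT LoraConfig that would serialize the target_modules
# shown in the updated adapter_config.json. r and lora_alpha are assumed values,
# not taken from this repository.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                      # assumed rank, not visible in the diff
    lora_alpha=32,             # assumed scaling factor, not visible in the diff
    target_modules=[
        "q_proj",
        "gate_proj",
        "up_proj",
        "down_proj",
        "v_proj",
    ],
    task_type="CAUSAL_LM",
)

# lora_config.save_pretrained("adapter_dir") would write an adapter_config.json
# containing this target_modules list alongside the other adapter settings.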
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:431e314ea12e00cff05b5491bf17885aed4aef9e0f06d64ab2b46eab7a71acbb
+oid sha256:4c97922e2ef0b2c95ed5d891d6c41a8cdc3c3504d6cbd85b0e41e3e0ff0a1356
 size 281061608
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9fb0d9e619634b6870d44addc7799b09be7de20cadeb9e883e15094e3ed1e6e9
+oid sha256:dfd459583752db22cdcb53e821c1bbc4a5b65d5cab79c70609e879659f4e3113
 size 6456
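
adapter_model.safetensors and training_args.bin are stored via Git LFS, so the diffs above change only the pointer files (the sha256 oid and size), not the binary contents themselves. A minimal sketch for checking a downloaded file against its pointer values follows; the local file path is a hypothetical example.

# Minimal sketch: verify a downloaded LFS object against the sha256 oid and
# size recorded in its pointer file. Paths are illustrative, not from the repo.
import hashlib
import os

def verify_lfs_object(local_path: str, expected_oid: str, expected_size: int) -> bool:
    # Compare the file size first, then stream the file through SHA-256.
    if os.path.getsize(local_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# Example using the new pointer values from this commit:
print(verify_lfs_object(
    "adapter_model.safetensors",
    "4c97922e2ef0b2c95ed5d891d6c41a8cdc3c3504d6cbd85b0e41e3e0ff0a1356",
    281061608,
))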