gerbyk committed
Commit
951980d
Parent: a611da2

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -20,13 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "k_proj",
-    "o_proj",
-    "up_proj",
-    "down_proj",
     "q_proj",
-    "v_proj",
-    "gate_proj"
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a6dd56321b5c32fa671c6a6f7a5b6aa3930f58c627c68e9894799ac12888fa1b
-size 7548928
+oid sha256:9c68c8fa9a2f32a931e2a37a733f41bff7805d0aa2fca289d2a69a848914e942
+size 940256
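The binaries are stored as Git LFS pointers: the oid is the SHA-256 of the real file content and size is its byte count, so the drop from 7548928 to 940256 bytes reflects the smaller adapter that results from targeting only two modules. A sketch of verifying a downloaded copy against the pointer (the local path is a placeholder):

```python
# Sketch: check a downloaded file against its Git LFS pointer.
# In the spec/v1 pointer format, oid is the SHA-256 of the file content.
import hashlib

def sha256_of_file(path: str) -> str:
    """Hash the file in chunks so large weights need not fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

# Expected oid from the new pointer in this commit:
expected = "9c68c8fa9a2f32a931e2a37a733f41bff7805d0aa2fca289d2a69a848914e942"
assert sha256_of_file("adapter_model.safetensors") == expected  # placeholder path
```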
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:65e01b48daec8cc6322db81f15c29342de87e58aca62e071c3792973d2575069
+oid sha256:7869c587b99b0dbd8d2263dd921942d24dfce2aef75b8b7a26cd208a11a4a57d
 size 5432
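training_args.bin is conventionally the transformers TrainingArguments object pickled via torch.save by the Trainer, which is consistent with its size staying at 5432 bytes while only the hash changes. Assuming that is the case here, it could be inspected like so:

```python
# Sketch: inspect the pickled training arguments (assumes the file was
# written by transformers' Trainer via torch.save, as is conventional).
import torch

args = torch.load("training_args.bin", weights_only=False)  # pickled object, not tensors
print(args)
```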