dickdiss committed on
Commit 0d1bcfa
1 Parent(s): bb55ef4

Upload Phi3ForCausalLM

Files changed (2):
  1. adapter_config.json +3 -3
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -4,7 +4,7 @@
   "base_model_name_or_path": "microsoft/Phi-3-mini-4k-instruct",
   "bias": "none",
   "fan_in_fan_out": false,
-  "inference_mode": true,
+  "inference_mode": false,
   "init_lora_weights": true,
   "layer_replication": null,
   "layers_pattern": null,
@@ -20,10 +20,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
     "q_proj",
     "v_proj",
-    "o_proj",
-    "k_proj"
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f00eddb9560800cfcc1fb352872711228027d6cfa71084499f4f68aaed59c3ac
+oid sha256:49ebb30bfd3c6ed84040ee94b76d7a8c581e2f74457892ce3335f1cfd3dd09c1
 size 12591456
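
The adapter_model.safetensors entry is a Git LFS pointer: only the sha256 oid changes (new adapter weights), while the size stays at 12591456 bytes. As a rough illustration, a downloaded copy of the weights can be checked against the pointer like this; the local path is a placeholder.

# Minimal sketch: verify a locally downloaded adapter_model.safetensors against
# the oid/size recorded in the Git LFS pointer above.
import hashlib
from pathlib import Path

path = Path("adapter_model.safetensors")  # placeholder local path
expected_oid = "49ebb30bfd3c6ed84040ee94b76d7a8c581e2f74457892ce3335f1cfd3dd09c1"
expected_size = 12591456

data = path.read_bytes()
assert len(data) == expected_size, "size differs from the LFS pointer"
assert hashlib.sha256(data).hexdigest() == expected_oid, "sha256 differs from the LFS pointer"
print("adapter weights match the LFS pointer")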