mikecovlee committed on
Commit
5f97d19
1 Parent(s): d285f33

Upload 2 files

Browse files
Files changed (2) hide show
  1. adapter_config.json +6 -3
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -5,15 +5,18 @@
5
  "lora_alpha": 16,
6
  "lora_dropout": 0.05,
7
  "target_modules": [
 
 
 
8
  "o_proj",
 
9
  "down_proj",
10
- "qkv_proj",
11
- "gate_up_proj"
12
  ],
13
  "routing_strategy": "mixtral",
14
  "num_experts": 8,
15
  "act_fn": "silu",
16
  "top_k": 2,
17
- "base_model_name_or_path": "microsoft/Phi-3-mini-4k-instruct",
18
  "task_type": "CAUSAL_LM"
19
  }
 
5
  "lora_alpha": 16,
6
  "lora_dropout": 0.05,
7
  "target_modules": [
8
+ "q_proj",
9
+ "k_proj",
10
+ "v_proj",
11
  "o_proj",
12
+ "gate_proj",
13
  "down_proj",
14
+ "up_proj"
 
15
  ],
16
  "routing_strategy": "mixtral",
17
  "num_experts": 8,
18
  "act_fn": "silu",
19
  "top_k": 2,
20
+ "base_model_name_or_path": "TinyLlama/TinyLlama_v1.1",
21
  "task_type": "CAUSAL_LM"
22
  }
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9b8d35208d964d4a04bc24270b73ecda5ce20bbb93911182b05f9340e7b66a66
3
- size 274122242
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:303bd3dc84143931dd91498cfd5c9768d44405fa33a75c17d73697421c106d00
3
+ size 140680928