mikecovlee committed
Commit 0455091
1 Parent(s): 7221067

Upload 2 files

Files changed (2)
  1. adapter_config.json +9 -4
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -1,11 +1,14 @@
 {
     "bias": "none",
     "peft_type": "MIXLORA",
-    "task_type": "CAUSAL_LM",
-    "r": 8,
-    "lora_alpha": 16,
+    "r": 16,
+    "lora_alpha": 32,
     "lora_dropout": 0.05,
     "target_modules": [
+        "q_proj",
+        "k_proj",
+        "v_proj",
+        "o_proj",
         "w1_proj",
         "w2_proj",
         "w3_proj"
@@ -13,5 +16,7 @@
     "routing_strategy": "mixtral",
     "num_experts": 8,
     "act_fn": "silu",
-    "top_k": 3
+    "top_k": 2,
+    "base_model_name_or_path": "/host_data/Llama-2-7b-hf",
+    "task_type": "CAUSAL_LM"
 }
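
The config change doubles the LoRA rank and alpha (r 8 → 16, lora_alpha 16 → 32), extends target_modules from the MoE w1/w2/w3 projections to the attention projections as well, lowers top_k from 3 to 2, and records the base model path. Below is a minimal Python sketch for sanity-checking the updated config before loading it, using only the standard library; the local file path is an assumption:

```python
import json

# Hypothetical local path to the downloaded config; adjust as needed.
CONFIG_PATH = "adapter_config.json"

with open(CONFIG_PATH) as f:
    cfg = json.load(f)

# Verify the fields this commit touches: rank/alpha were doubled,
# attention projections were added to target_modules, and top_k
# dropped from 3 to 2.
assert cfg["peft_type"] == "MIXLORA"
assert cfg["r"] == 16 and cfg["lora_alpha"] == 32
assert {"q_proj", "k_proj", "v_proj", "o_proj"} <= set(cfg["target_modules"])
assert cfg["top_k"] == 2 and cfg["num_experts"] == 8
print(f"MixLoRA adapter for {cfg['base_model_name_or_path']}: "
      f"{cfg['num_experts']} experts, top-{cfg['top_k']} routing")
```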
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6ad12b855eddc19cb40331ea5d031130f690f7078a6bdec829bd1e5e98d73596
-size 375936965
+oid sha256:365d33be1c14b218381c260b1c51dfb287a18d229013cdf4e4b2fddf1e6bd192
+size 814375941
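
The weights file is stored via Git LFS, so the diff only swaps the content-addressed pointer: a new SHA-256 oid and a larger size (814375941 bytes, roughly 776 MiB, up from about 358 MiB), consistent with the extra attention-projection adapters. A hedged sketch for verifying a downloaded adapter_model.bin against the new pointer, with the local path again an assumption:

```python
import hashlib

# Hypothetical local path to the downloaded weights; adjust as needed.
WEIGHTS_PATH = "adapter_model.bin"

# Expected values from the updated LFS pointer in this commit.
EXPECTED_OID = "365d33be1c14b218381c260b1c51dfb287a18d229013cdf4e4b2fddf1e6bd192"
EXPECTED_SIZE = 814_375_941  # bytes (~776 MiB)

sha256 = hashlib.sha256()
size = 0
with open(WEIGHTS_PATH, "rb") as f:
    # Stream in 1 MiB chunks so the file is never fully in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
        size += len(chunk)

assert size == EXPECTED_SIZE, f"size mismatch: {size}"
assert sha256.hexdigest() == EXPECTED_OID, "sha256 mismatch"
print("adapter_model.bin matches the LFS pointer")
```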