Birchlabs committed
Commit cedd2cd
1 Parent(s): 09fb631

Upload 2 files


update with Alpaca-derived finetune
https://wandb.ai/scottlogic/llm-stepwise/runs/0mrge3vo?workspace=user-birchlabs

Files changed (2)
  1. adapter_config.json +5 -8
  2. adapter_model.bin +2 -2
adapter_config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "base_model_name_or_path": "huggyllama/llama-7b",
+  "base_model_name_or_path": "decapoda-research/llama-7b-hf",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -7,19 +7,16 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "lora_alpha": 16,
-  "lora_dropout": 0.0,
+  "lora_dropout": 0.05,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 64,
+  "r": 16,
   "revision": null,
   "target_modules": [
-    "up_proj",
     "q_proj",
-    "down_proj",
-    "o_proj",
-    "gate_proj",
     "k_proj",
-    "v_proj"
+    "v_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
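
For reference, the updated adapter_config.json corresponds roughly to the following peft LoraConfig. This is a sketch reconstructed from the diff above, not code from this repo; the commented loading snippet uses a placeholder path for the adapter.

from peft import LoraConfig

# Reconstruction of the new adapter_config.json values:
# rank lowered from 64 to 16, dropout raised to 0.05,
# and only the attention projections kept as target modules.
lora_config = LoraConfig(
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
    bias="none",
    task_type="CAUSAL_LM",
)

# Typical way to apply the uploaded adapter on top of the new base model
# ("path/to/this/adapter" is a placeholder for a local checkout or hub repo id):
# from transformers import LlamaForCausalLM
# from peft import PeftModel
# base = LlamaForCausalLM.from_pretrained("decapoda-research/llama-7b-hf")
# model = PeftModel.from_pretrained(base, "path/to/this/adapter")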
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:541d84a5b6669f54547e93aebe7c5ef6c9a181678e4705abd6a0becf610529fa
-size 319977670
+oid sha256:f29350301e1d1b179c880c6dc5a726a0784d18524507cc480751a0728be37109
+size 33646925
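
The smaller adapter_model.bin is consistent with the config change: dropping the rank from 64 to 16 and training only the four attention projections shrinks the LoRA parameter count by roughly an order of magnitude. A rough back-of-envelope check, assuming standard LLaMA-7B shapes (32 layers, hidden size 4096, MLP size 11008) and fp16 storage:

# Rough size check for the adapter weights (assumptions: fp16, LLaMA-7B shapes).
layers, hidden, mlp = 32, 4096, 11008

# old config: r=64 over q/k/v/o (hidden->hidden) plus gate/up/down (hidden<->mlp)
old_bytes = 64 * (4 * (hidden + hidden) + 3 * (hidden + mlp)) * layers * 2
# new config: r=16 over q/k/v/o only
new_bytes = 16 * (4 * (hidden + hidden)) * layers * 2

print(old_bytes, new_bytes)  # ~320 MB vs ~34 MB, close to the LFS pointer sizes above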