Safetensors
unsloth
DavidLanz committed on
Commit
18be84b
·
verified ·
1 Parent(s): 973b906

Upload model trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

Files changed (2) hide show
  1. adapter_config.json +6 -4
  2. adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -23,11 +23,12 @@
23
  "lora_alpha": 256,
24
  "lora_bias": false,
25
  "lora_dropout": 0,
 
26
  "megatron_config": null,
27
  "megatron_core": "megatron.core",
28
  "modules_to_save": null,
29
  "peft_type": "LORA",
30
- "peft_version": "0.18.1",
31
  "qalora_group_size": 16,
32
  "r": 128,
33
  "rank_pattern": {},
@@ -35,15 +36,16 @@
35
  "target_modules": [
36
  "v_proj",
37
  "up_proj",
 
38
  "o_proj",
39
- "down_proj",
40
  "q_proj",
41
- "gate_proj",
42
- "k_proj"
43
  ],
44
  "target_parameters": null,
45
  "task_type": "CAUSAL_LM",
46
  "trainable_token_indices": null,
 
47
  "use_dora": false,
48
  "use_qalora": false,
49
  "use_rslora": false
 
23
  "lora_alpha": 256,
24
  "lora_bias": false,
25
  "lora_dropout": 0,
26
+ "lora_ga_config": null,
27
  "megatron_config": null,
28
  "megatron_core": "megatron.core",
29
  "modules_to_save": null,
30
  "peft_type": "LORA",
31
+ "peft_version": "0.19.1",
32
  "qalora_group_size": 16,
33
  "r": 128,
34
  "rank_pattern": {},
 
36
  "target_modules": [
37
  "v_proj",
38
  "up_proj",
39
+ "k_proj",
40
  "o_proj",
 
41
  "q_proj",
42
+ "down_proj",
43
+ "gate_proj"
44
  ],
45
  "target_parameters": null,
46
  "task_type": "CAUSAL_LM",
47
  "trainable_token_indices": null,
48
+ "use_bdlora": null,
49
  "use_dora": false,
50
  "use_qalora": false,
51
  "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:77977251105f97e44823192b5b2f5de272b3f728b6c10a70a412b368f4b4b9cd
3
  size 121537408
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c10f86d510945705c469d7fc4237d3e04988571ebd6b13796d5cb27a6303269b
3
  size 121537408