cyr19 committed
Commit
9dbe9b6
1 Parent(s): ef822f2

Upload adapter_config.json with huggingface_hub

Files changed (2)
  1. .gitattributes +1 -0
  2. adapter_config.json +3 -34
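
The commit message matches the default used by huggingface_hub's upload helper. As a minimal sketch of how a commit like this is typically produced (the repo id below is a placeholder, since the actual repository id is not shown on this page):

from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="adapter_config.json",  # local file to upload
    path_in_repo="adapter_config.json",     # destination path in the repo
    repo_id="cyr19/placeholder-adapter",    # placeholder, not the actual repo id
    commit_message="Upload adapter_config.json with huggingface_hub",
)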
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+adapter_config.json filter=lfs diff=lfs merge=lfs -text
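
With this rule in place, any adapter_config.json committed to the repo is stored via Git LFS, so the file in the Git tree becomes a small "version/oid/size" pointer, as the next diff shows. A sketch of how such a pointer can be detected (pointer files begin with the exact spec line below):

def is_lfs_pointer(path: str) -> bool:
    # Git LFS pointer files start with this literal spec line.
    with open(path, "rb") as f:
        return f.read(64).startswith(b"version https://git-lfs.github.com/spec/v1")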
adapter_config.json CHANGED
@@ -1,34 +1,3 @@
-{
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
-  "bias": "none",
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "layer_replication": null,
-  "layers_pattern": null,
-  "layers_to_transform": null,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_dropout": 0.05,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": [
-    "embed_tokens"
-  ],
-  "peft_type": "LORA",
-  "r": 16,
-  "rank_pattern": {},
-  "revision": null,
-  "target_modules": [
-    "o_proj",
-    "k_proj",
-    "q_proj",
-    "v_proj",
-    "embed_tokens"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:3317be6a5441d511b7e9bdd1dd27725e5a9338c9f6986505bc6414daed1674c8
+size 1116538992
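
The deleted JSON is a standard PEFT LoRA adapter configuration. As a sketch, it corresponds roughly to the following peft.LoraConfig (field values copied from the removed file; this reconstructs the config and is not code from the repository):

from peft import LoraConfig

config = LoraConfig(
    base_model_name_or_path="meta-llama/Llama-2-7b-hf",
    r=16,
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    target_modules=["o_proj", "k_proj", "q_proj", "v_proj", "embed_tokens"],
    modules_to_save=["embed_tokens"],  # embeddings are fully trained and saved alongside the LoRA weights
    task_type="CAUSAL_LM",
)
config.save_pretrained("adapter_dir")  # writes adapter_dir/adapter_config.json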