upload model

#1
by olanigan - opened
Files changed (3)
  1. README.md +0 -37
  2. adapter_config.json +0 -21
  3. adapter_model.bin +0 -3
README.md DELETED
@@ -1,37 +0,0 @@
- ---
- library_name: peft
- pipeline_tag: text-generation
- tags:
- - code
- ---
- ## Training procedure
-
-
- The following `bitsandbytes` quantization config was used during training:
- - quant_method: bitsandbytes
- - load_in_8bit: False
- - load_in_4bit: True
- - llm_int8_threshold: 6.0
- - llm_int8_skip_modules: None
- - llm_int8_enable_fp32_cpu_offload: False
- - llm_int8_has_fp16_weight: False
- - bnb_4bit_quant_type: nf4
- - bnb_4bit_use_double_quant: True
- - bnb_4bit_compute_dtype: float16
-
- The following `bitsandbytes` quantization config was used during training:
- - quant_method: bitsandbytes
- - load_in_8bit: False
- - load_in_4bit: True
- - llm_int8_threshold: 6.0
- - llm_int8_skip_modules: None
- - llm_int8_enable_fp32_cpu_offload: False
- - llm_int8_has_fp16_weight: False
- - bnb_4bit_quant_type: nf4
- - bnb_4bit_use_double_quant: True
- - bnb_4bit_compute_dtype: float16
- ### Framework versions
-
- - PEFT 0.5.0
-
- - PEFT 0.5.0
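For reference, the deleted README records the `bitsandbytes` settings used during training. Below is a minimal sketch (not part of this PR) of how those recorded values would map onto `transformers`' `BitsAndBytesConfig` when loading the base model; the model id is taken from `adapter_config.json` below, and `device_map="auto"` is an assumption, not something the README records:

```python
# Hedged sketch: the quantization config recorded in the deleted README,
# reconstructed as a transformers BitsAndBytesConfig.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # load_in_4bit: True
    bnb_4bit_quant_type="nf4",             # bnb_4bit_quant_type: nf4
    bnb_4bit_use_double_quant=True,        # bnb_4bit_use_double_quant: True
    bnb_4bit_compute_dtype=torch.float16,  # bnb_4bit_compute_dtype: float16
)

base_model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf",  # base model named in adapter_config.json below
    quantization_config=bnb_config,
    device_map="auto",           # assumption: placement not recorded in the README
)
```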
adapter_config.json DELETED
@@ -1,21 +0,0 @@
- {
-   "auto_mapping": null,
-   "base_model_name_or_path": "meta-llama/Llama-2-7b-hf",
-   "bias": "none",
-   "fan_in_fan_out": false,
-   "inference_mode": true,
-   "init_lora_weights": true,
-   "layers_pattern": null,
-   "layers_to_transform": null,
-   "lora_alpha": 16,
-   "lora_dropout": 0.05,
-   "modules_to_save": null,
-   "peft_type": "LORA",
-   "r": 8,
-   "revision": null,
-   "target_modules": [
-     "q_proj",
-     "v_proj"
-   ],
-   "task_type": "CAUSAL_LM"
- }
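This is a standard PEFT LoRA config. As a hedged sketch, the same settings expressed as a `peft.LoraConfig` (values copied from the JSON above):

```python
# Hedged sketch: the deleted adapter_config.json as a peft LoraConfig.
from peft import LoraConfig

lora_config = LoraConfig(
    r=8,                                  # "r": 8 (LoRA rank)
    lora_alpha=16,                        # "lora_alpha": 16 (scaling)
    lora_dropout=0.05,                    # "lora_dropout": 0.05
    bias="none",                          # "bias": "none"
    target_modules=["q_proj", "v_proj"],  # attention query/value projections
    task_type="CAUSAL_LM",                # "task_type": "CAUSAL_LM"
)
```

With r=8 on only q_proj and v_proj, Llama-2-7B (32 layers, hidden size 4096) gets 2 × 8 × 4096 × 2 × 32 ≈ 4.2M adapter parameters, which stored in fp32 comes to about 16.8 MB and is consistent with the 16,822,989-byte adapter_model.bin below.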
adapter_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:72f32b83569e46cf8cc8983927153e32aca78257b30085349108e9011775fbe5
- size 16822989
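adapter_model.bin is stored via Git LFS, so the diff removes only the pointer file; the oid is the sha256 of the actual weights. A hedged sketch (the local filename is an assumption) of verifying a downloaded copy against this pointer:

```python
# Hedged sketch: verify a local copy of the weights against the LFS pointer.
import hashlib

EXPECTED_OID = "72f32b83569e46cf8cc8983927153e32aca78257b30085349108e9011775fbe5"
EXPECTED_SIZE = 16_822_989  # bytes, from the pointer's "size" line

with open("adapter_model.bin", "rb") as f:  # assumed local path
    data = f.read()

assert len(data) == EXPECTED_SIZE, "size differs from the LFS pointer"
assert hashlib.sha256(data).hexdigest() == EXPECTED_OID, "sha256 differs from the LFS pointer"
print("adapter_model.bin matches the LFS pointer")
```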