paultrust committed
Commit 1612b19
Parent: f6d62a4

Upload model

Files changed (3)
  1. README.md +5 -5
  2. adapter_config.json +3 -2
  3. adapter_model.bin +2 -2
README.md CHANGED
@@ -5,15 +5,15 @@ library_name: peft
 
 
 The following `bitsandbytes` quantization config was used during training:
-- load_in_8bit: False
-- load_in_4bit: True
+- load_in_8bit: True
+- load_in_4bit: False
 - llm_int8_threshold: 6.0
 - llm_int8_skip_modules: None
 - llm_int8_enable_fp32_cpu_offload: False
 - llm_int8_has_fp16_weight: False
-- bnb_4bit_quant_type: nf4
-- bnb_4bit_use_double_quant: True
-- bnb_4bit_compute_dtype: bfloat16
+- bnb_4bit_quant_type: fp4
+- bnb_4bit_use_double_quant: False
+- bnb_4bit_compute_dtype: float32
 ### Framework versions
 
 
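For reference, a minimal sketch (not the author's training script) of how the new (+) side of this config could be reconstructed with `transformers.BitsAndBytesConfig`. The base model name is taken from the adapter_config.json change below; the `bnb_4bit_*` fields are carried over from the diff even though they are inactive when loading in 8-bit.

```python
# Sketch: rebuilding the updated quantization config shown in the README diff.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_8bit=True,
    load_in_4bit=False,
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
    # Inert while load_in_4bit=False, but mirrors the new (+) side of the diff.
    bnb_4bit_quant_type="fp4",
    bnb_4bit_use_double_quant=False,
    bnb_4bit_compute_dtype=torch.float32,
)

# Base model name taken from the adapter_config.json change below.
base_model = AutoModelForCausalLM.from_pretrained(
    "facebook/opt-6.7b",
    quantization_config=bnb_config,
    device_map="auto",
)
```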
adapter_config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "base_model_name_or_path": "tiiuae/falcon-7b-instruct",
+  "base_model_name_or_path": "facebook/opt-6.7b",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -13,7 +13,8 @@
   "r": 16,
   "revision": null,
   "target_modules": [
-    "query_key_value"
+    "q_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
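The new target modules match OPT's attention projection names, replacing Falcon's fused `query_key_value`. A minimal sketch of the corresponding LoRA setup with `peft`, assuming default values for the fields elided from this diff (lines 6-12, e.g. `lora_alpha`, `lora_dropout`):

```python
# Sketch: the LoRA configuration implied by the new adapter_config.json.
from peft import LoraConfig, get_peft_model

lora_config = LoraConfig(
    r=16,
    target_modules=["q_proj", "v_proj"],  # OPT attention projections
    bias="none",
    fan_in_fan_out=False,
    task_type="CAUSAL_LM",
)

# base_model as in the quantized-loading sketch above.
peft_model = get_peft_model(base_model, lora_config)
peft_model.print_trainable_parameters()
```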
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d0c7880b07f3f368538d5aca531471d8cfe57f5eb1cb8c663fd9d3bb85a1a8f7
-size 18898161
+oid sha256:d86d27b2c6f0e967dbe39fb4c80df235947cbd36a7e366d9538ea770b84cf5df
+size 33601485
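The pointer swap replaces the LFS object outright, and the payload grows from roughly 18.9 MB to 33.6 MB, consistent with the larger adapter. A small sketch, using only the Python standard library, of checking a locally downloaded `adapter_model.bin` against the new pointer's `oid`:

```python
# Sketch: verifying a downloaded adapter_model.bin against the LFS pointer.
import hashlib

def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
    return h.hexdigest()

expected = "d86d27b2c6f0e967dbe39fb4c80df235947cbd36a7e366d9538ea770b84cf5df"
assert sha256_of("adapter_model.bin") == expected, "checksum mismatch"
```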