fsaudm committed
Commit c631618
Parent: 6e6c311

Update config.json

Files changed (1):
config.json (+3 -3)
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "unsloth/Meta-Llama-3.1-70B-Instruct-bnb-4bit",
+  "_name_or_path": "meta-llama/Meta-Llama-3.1-70B-Instruct",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -30,7 +30,7 @@
   "bnb_4bit_quant_storage": "uint8",
   "bnb_4bit_quant_type": "nf4",
   "bnb_4bit_use_double_quant": true,
-  "llm_int8_enable_fp32_cpu_offload": false,
+  "llm_int8_enable_fp32_cpu_offload": true,
   "llm_int8_has_fp16_weight": false,
   "llm_int8_skip_modules": null,
   "llm_int8_threshold": 6.0,
@@ -48,7 +48,7 @@
   },
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.43.2",
   "unsloth_version": "2024.9",
   "use_cache": true,