areegtarek committed
Commit 8676272
1 parent: 8e21209

Trained with Unsloth

Upload model trained with Unsloth 2x faster

config.json CHANGED
@@ -12,13 +12,14 @@
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "max_position_embeddings": 8192,
+  "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "pretraining_tp": 1,
   "quantization_config": {
-    "bnb_4bit_compute_dtype": "float16",
+    "bnb_4bit_compute_dtype": "bfloat16",
     "bnb_4bit_quant_type": "nf4",
     "bnb_4bit_use_double_quant": true,
     "llm_int8_enable_fp32_cpu_offload": false,
@@ -33,9 +34,9 @@
   "rope_scaling": null,
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
-  "transformers_version": "4.40.2",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.41.2",
   "unsloth_version": "2024.5",
-  "use_cache": true,
+  "use_cache": false,
   "vocab_size": 128256
 }
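The updated quantization_config can be reproduced at load time with a BitsAndBytesConfig. The sketch below is a minimal example under assumptions: the repo id is a placeholder (the actual repository name is not part of this diff), and device_map="auto" is a guess about the target hardware; only the NF4 / double-quant / bfloat16 values come from the diff itself.

# Minimal loading sketch, assuming the quantization_config shown above
# should be mirrored at load time. The repo id is a placeholder.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

repo_id = "user/model-repo"  # placeholder; substitute the real repository id

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",             # matches the diff
    bnb_4bit_use_double_quant=True,        # matches the diff
    bnb_4bit_compute_dtype=torch.bfloat16, # changed from float16 in this commit
)

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    quantization_config=bnb_config,
    torch_dtype=torch.bfloat16,  # mirrors the updated "torch_dtype"
    device_map="auto",           # assumption, not part of the diff
)

Passing torch.bfloat16 for both the compute dtype and torch_dtype mirrors the commit's switch away from float16.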
generation_config.json CHANGED
@@ -5,5 +5,5 @@
   "max_length": 4096,
   "temperature": 0.6,
   "top_p": 0.9,
-  "transformers_version": "4.40.2"
+  "transformers_version": "4.41.2"
 }
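A short sampling sketch using the values kept in generation_config.json (max_length 4096, temperature 0.6, top_p 0.9), continuing from the loading sketch above. The prompt and the explicit do_sample=True flag are assumptions; only the three sampling values appear in this file.

# Sampling sketch with the generation_config.json values.
inputs = tokenizer("Hello, how are you?", return_tensors="pt").to(model.device)
outputs = model.generate(
    **inputs,
    do_sample=True,    # assumption; temperature/top_p only apply when sampling
    temperature=0.6,
    top_p=0.9,
    max_length=4096,
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))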
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e7b2a77b09e92bdff3854850b17a452b4d17863c400d0ed8b17270d8b6654a85
-size 4652072863
+oid sha256:ef209c052b3997d771cdc70ca185fde83f4b011b93dd3cb4730b6199e01736ae
+size 4652072928
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d4b53fa307a8988f692ffbfdb1fc2503ae0980a799e3fc9b75f1d5e23781fb37
+oid sha256:7c5394fe848f6eafef2a44f085e7b72e5ef0d315f98aa87b3827c2b1e6983b89
 size 1050673280
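Both shards are stored as Git LFS pointers, so a downloaded file can be checked against the pointer's sha256 oid and byte size. A minimal verification sketch using the new pointer values for the first shard (the local path is a placeholder):

# Integrity check: compare a downloaded shard against the sha256 oid
# and size recorded in its Git LFS pointer.
import hashlib
import os

path = "model-00001-of-00002.safetensors"  # placeholder local path
expected_oid = "ef209c052b3997d771cdc70ca185fde83f4b011b93dd3cb4730b6199e01736ae"
expected_size = 4652072928

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("shard matches its LFS pointer")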
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 5702577743
+    "total_size": 5702577744
   },
   "weight_map": {
     "lm_head.weight": "model-00002-of-00002.safetensors",