nm-autobot committed
Upload folder using huggingface_hub
- config.json +2 -2
- generation_config.json +1 -1
config.json CHANGED
@@ -53,7 +53,7 @@
         }
       },
       "format": "int-quantized",
-      "global_compression_ratio": 1.
+      "global_compression_ratio": 1.5236665854369158,
       "ignore": [
         "lm_head"
       ],
@@ -66,7 +66,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.48.0",
   "use_cache": true,
   "vocab_size": 32000
 }
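For context, the fields touched here are regular entries in the model's config.json, so they can be inspected after download. A minimal sketch of reading them back with transformers, assuming a placeholder repo id (the actual repository name is not shown in this commit view) and that the compression metadata lives under a quantization section of the config, as is typical for int-quantized / compressed-tensors checkpoints:

# Sketch only: "nm-autobot/your-model" is a hypothetical repo id,
# substitute the real model repository this commit belongs to.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("nm-autobot/your-model")

print(config.torch_dtype)            # bfloat16 (per this config)
print(config.vocab_size)             # 32000
print(config.transformers_version)   # "4.48.0" after this commit

# The compression metadata ("format": "int-quantized",
# "global_compression_ratio", "ignore": ["lm_head"]) is assumed to sit
# under the config's quantization section; its exact layout depends on
# the quantization backend used to produce the checkpoint.
quant = getattr(config, "quantization_config", None)
print(quant)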
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "eos_token_id": 2,
   "max_length": 2048,
   "pad_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.48.0"
 }
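The generation defaults changed here load the same way. A short sketch, again with a hypothetical repo id:

# Sketch only: repo id is a placeholder (assumption).
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("nm-autobot/your-model")
print(gen_config.eos_token_id)  # 2
print(gen_config.max_length)    # 2048
print(gen_config.pad_token_id)  # 0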