Upload folder using huggingface_hub (#3)
- 53e01972596109df6ad72e885f6e2dfe6a1aa1922fb767af667e60cef5b69f50 (e6eaa385eacb05e7b01daae082635ed49214e04a)
- 10f8b3d2cfa035bfb883c76188bbc0a9cc094c35c48e12d4c5e0025fb3b62a9e (c7f3bf6860d05cff1dd8be93a9012de8240431ae)
- 901389e5816f7bd516d93da3206f866a22f05d9fcff502bda8c5c554527b816d (16b5d72d271e50d5f65311370cc8a5d9da2e07ca)
- d4a2881dd7bb3e4e85851504c1337ea994bf21ce09ed8cacca8c6b62366c8d17 (1a1940f5e48b8218c7d5fffd2849ba4cfd4a3aac)
- d4687d3207319b5b27feb4cfbca7639fabe0ceca4bfcab07df0705294e555dad (f1feb856c2b014111f5478d4d51e9ebcc44e4a38)
- config.json +4 -2
- generation_config.json +1 -1
- model-00001-of-00004.safetensors +3 -0
- model-00002-of-00004.safetensors +3 -0
- model-00003-of-00004.safetensors +3 -0
- model-00004-of-00004.safetensors +3 -0
- model.safetensors.index.json +0 -0
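
A commit like this is typically produced with huggingface_hub's upload_folder API, as the commit title suggests. Below is a minimal sketch, not the exact invocation used here: the folder path and repo_id are placeholders, and the token is assumed to be configured via huggingface-cli login.

# Sketch: push a local checkpoint folder as one commit / pull request.
# folder_path and repo_id are hypothetical; this commit does not record them.
from huggingface_hub import HfApi

api = HfApi()  # reuses the token stored by `huggingface-cli login`
api.upload_folder(
    folder_path="./quantized-llama",   # local dir holding config.json, shards, index
    repo_id="your-org/your-model",     # placeholder target repository
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
    create_pr=True,                    # this change landed as a pull request (#3)
)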
config.json
CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -26,14 +27,15 @@
     "model_version": 1,
     "outlier_channel_split": false,
     "packsz": 4,
-    "rescale_WH": false
+    "rescale_WH": false,
+    "resid_scale_override": -1
   },
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
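
To confirm the fields touched by this commit after downloading the updated config.json, a plain-json check is enough. A minimal sketch, assuming a local copy of the file; the nested quantization block that holds rescale_WH and resid_scale_override is not named in the hunk, so it is not dereferenced here.

# Sketch: inspect the keys changed in this commit in a local copy of config.json.
import json

with open("config.json") as f:
    cfg = json.load(f)

print(cfg["attention_dropout"])       # newly added, 0.0
print(cfg["transformers_version"])    # now pinned to "4.36.2"
print(cfg["torch_dtype"], cfg["vocab_size"])   # unchanged context: float16, 32000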
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "transformers_version": "4.
+  "transformers_version": "4.36.2"
 }
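
generation_config.json picks up the same version pin. A sketch of pulling just this file at a given revision with hf_hub_download; the repo_id and revision below are placeholders.

# Sketch: fetch a single file from the Hub and check the pinned version.
import json
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="your-org/your-model",       # placeholder repository
    filename="generation_config.json",
    revision="main",                     # or the merge commit of this PR
)
with open(path) as f:
    print(json.load(f)["transformers_version"])   # expected "4.36.2" after this commit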
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:759f972df253f5836d0a4de2a57b118b513ae467e5c548951189148b6bf025e6
+size 4983111796
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5637f4c2eccdb2c772a52f8add6b1b3e8e02187fd077e236d03ae1efe6c0c147
+size 4931379986
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f46cdeea631914f3e935c6ac1a5c14846cd140752338e1692a90cbea8b416d2
+size 4999636952
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:740f5ce16a69b1ce8d2b4b53ed77144810de6dc4ab28c0b636e8dfb3b28b4666
+size 2348368874
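
Each *.safetensors entry above is a Git LFS pointer: the spec version, the sha256 oid of the actual payload, and its size in bytes. A downloaded shard can be checked against its pointer with a few lines; a sketch follows, streaming the file so multi-gigabyte shards never need to fit in memory.

# Sketch: verify a downloaded shard against the oid and size from its LFS pointer.
import hashlib
import os

def verify_shard(path, expected_oid, expected_size):
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):   # 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(path) == expected_size

# values copied from the model-00001-of-00004.safetensors pointer above
print(verify_shard(
    "model-00001-of-00004.safetensors",
    "759f972df253f5836d0a4de2a57b118b513ae467e5c548951189148b6bf025e6",
    4983111796,
))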
model.safetensors.index.json
CHANGED
The diff for this file is too large to render.
See raw diff
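
model.safetensors.index.json follows the standard transformers shard-index layout: a metadata block with total_size and a weight_map from tensor names to shard files. Since the diff is too large to render here, a sketch for summarizing a local copy of the index instead:

# Sketch: count tensors per shard from a local model.safetensors.index.json.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

for shard, n in sorted(Counter(index["weight_map"].values()).items()):
    print(f"{shard}: {n} tensors")
print("total size (bytes):", index["metadata"]["total_size"])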