Upload folder using huggingface_hub
#5
by schwgHao - opened
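This is the commit message that huggingface_hub writes when a local checkpoint directory is pushed with its folder-upload API. For reference, a minimal sketch of how such a commit can be opened as a pull request; the local path and repo id below are placeholders, not values taken from this PR:

```python
from huggingface_hub import HfApi

api = HfApi()  # assumes you are already logged in (e.g. via `huggingface-cli login`)

# Push every file in a local checkpoint folder as one commit, opened as a PR.
# Large *.bin shards are stored through the Hub's Git LFS backend.
api.upload_folder(
    folder_path="./global_step6517_hf",    # placeholder local checkpoint folder
    repo_id="your-org/your-reward-model",  # placeholder repository id
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
    create_pr=True,
)
```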
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "/cognitive_comp/liangyuxin/workspace/pipeline/ckpt/reward_model/0817_RM13B_MIX/global_step6517_hf",
   "architectures": [
-    "
+    "LlamaHFRewardModel_Mix"
   ],
   "bos_token_id": 1,
   "eos_token_id": 2,
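The updated config records the original training checkpoint path and a custom architecture name, LlamaHFRewardModel_Mix, which is not a stock transformers class. A small sketch for inspecting those fields from a downloaded copy of config.json (assumes the file sits in the current directory):

```python
import json

# Read the two fields changed in this diff.
with open("config.json") as f:
    cfg = json.load(f)

print(cfg["_name_or_path"])  # training checkpoint path recorded by the uploader
print(cfg["architectures"])  # ["LlamaHFRewardModel_Mix"] after this change
```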
pytorch_model-00001-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e8048785ef19d7746f9d94ee6951e0cdbc50493b96d1591c6f57fe96c3407ec
+size 9898869078
pytorch_model-00002-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84783f31b2264e2d9fbb2e8c80e7a6aa3c90cbef1632b811ea3f88befa83596c
+size 9940854943
pytorch_model-00003-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:33d859813a54bed5c466448717ca0f825a57de101a4ba4c5fc6e8cdb86f2d157
+size 9867413310
pytorch_model-00004-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a93e1f69bdc18dbfa0f8443e25f4fda63329d50aab2fa8f135af51cc01ebedbf
+size 9867454940
pytorch_model-00005-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b56e3799ffc0c8bc49734c7f971f553286d7740aa61471302254e91d1800001a
+size 9867455016
pytorch_model-00006-of-00006.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f8fe66e95701880bbd212ce0aa001ae646807a6e4d985cf1a7cc86fb4691f89b
+size 2118272809
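The six *.bin entries above are Git LFS pointer files: each stores only the LFS spec version, the sha256 oid of the real shard, and its size in bytes. After downloading a shard you can check it against its pointer; a sketch, assuming the shard sits in the current directory:

```python
import hashlib
import os

def verify_against_lfs_pointer(shard_path: str, expected_sha256: str, expected_size: int) -> bool:
    """Compare a downloaded shard with the oid/size recorded in its LFS pointer."""
    if os.path.getsize(shard_path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(8 * 1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

# Values copied from the pointer for shard 1 of 6.
ok = verify_against_lfs_pointer(
    "pytorch_model-00001-of-00006.bin",
    "1e8048785ef19d7746f9d94ee6951e0cdbc50493b96d1591c6f57fe96c3407ec",
    9898869078,
)
print("shard 1 matches pointer:", ok)
```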
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size":
+    "total_size": 51560192008
   },
   "weight_map": {
     "model.embed_tokens.weight": "pytorch_model-00001-of-00006.bin",
@@ -405,6 +405,8 @@
     "model.layers.9.self_attn.rotary_emb.inv_freq": "pytorch_model-00002-of-00006.bin",
     "model.layers.9.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
     "model.norm.weight": "pytorch_model-00006-of-00006.bin",
+    "token_value_head.bias": "pytorch_model-00006-of-00006.bin",
+    "token_value_head.weight": "pytorch_model-00006-of-00006.bin",
     "value_head.bias": "pytorch_model-00006-of-00006.bin",
     "value_head.weight": "pytorch_model-00006-of-00006.bin"
   }
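The index maps every tensor name to the shard file that stores it, and the two new token_value_head entries land in the last shard alongside the existing value_head. A short sketch of reading the index, assuming it and the shards have been downloaded locally:

```python
import json
import os

with open("pytorch_model.bin.index.json") as f:
    index = json.load(f)

# Which shard holds the newly added token-level value head?
print(index["weight_map"]["token_value_head.weight"])  # pytorch_model-00006-of-00006.bin

# The shard file sizes should roughly add up to metadata.total_size
# (the index counts tensor bytes, so a small serialization overhead is expected).
shards = sorted(set(index["weight_map"].values()))
on_disk = sum(os.path.getsize(s) for s in shards if os.path.exists(s))
print(on_disk, index["metadata"]["total_size"])
```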