Upload folder using huggingface_hub (#2)
- b7b439f4c423c65a16fb1a0d10d12718959b59770813bc10a2e5014165e1e6f5 (8e7ffbf51fd747ef2a66e4f62813cfcf0056f904)
- 166935cc528a34f76607ed63766d2cc981c68b29209b4fdf3b8587e4837df0b8 (fc19d97add41804840a856276f8b95466bdbb5a1)
- config.json +2 -1
- generation_config.json +1 -1
- model.safetensors +1 -1
config.json
CHANGED
@@ -4,6 +4,7 @@
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
+  "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -35,7 +36,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "use_cache": true,
   "vocab_size": 32000
 }
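The substantive config.json change is the new attention_dropout field, which transformers added to LlamaConfig around v4.36; the transformers_version stamp moves to 4.36.2 to match. A minimal sketch of inspecting the updated config after download; the repo id "org/model" is a placeholder, not this repository's actual name:

# A minimal sketch, not part of this commit; "org/model" is a placeholder repo id.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("org/model")

# Field added in this commit; 0.0 is also the LlamaConfig default.
print(config.attention_dropout)     # expected: 0.0
print(config.transformers_version)  # expected: 4.36.2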
generation_config.json
CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 0,
-  "transformers_version": "4.
+  "transformers_version": "4.36.2"
 }
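generation_config.json only re-stamps transformers_version; the token ids are unchanged. A quick check that the generation defaults still load, again with a placeholder repo id:

# A minimal sketch; "org/model" is a placeholder repo id.
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("org/model")
print(gen.bos_token_id, gen.eos_token_id, gen.pad_token_id)  # expected: 1 2 0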
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a056ac4a2f25ffef23be33b425379552d4ccd10f4cb9957fa9354ed8563cf8c9
 size 2957619856
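The model.safetensors pointer records a new sha256 oid for the re-uploaded weights. A minimal sketch of checking a downloaded copy against that oid; the local path is a placeholder:

# A minimal sketch of verifying a downloaded weight file against the
# sha256 oid in the git-lfs pointer above; "model.safetensors" is a
# placeholder local path.
import hashlib

EXPECTED = "a056ac4a2f25ffef23be33b425379552d4ccd10f4cb9957fa9354ed8563cf8c9"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in chunks so a ~3 GB file never sits in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("model.safetensors") == EXPECTED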