Upload GemmaForCausalLM
Files changed:
- README.md +6 -6
- config.json +1 -1
- model.safetensors +2 -2
README.md
CHANGED
@@ -3,13 +3,13 @@ license: apache-2.0
 library_name: transformers
 base_model: google/gemma-2b
 tags:
-
-
-
-
-
+- text-generation
+- fine-tuned
+- pdf-grounded
+- zero-hallucination
+- technical-provenance
 language:
-
+- en
 pipeline_tag: text-generation
 ---

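The new front matter is what the Hub reads to pick the inference widget and default task: `pipeline_tag: text-generation` maps directly to the `text-generation` pipeline task in transformers. A minimal smoke test of the uploaded model might look like the sketch below; the repo id is a placeholder, since the commit view does not show the repository name.

```python
from transformers import pipeline

# Placeholder repo id: substitute the repository this commit was pushed to.
repo_id = "your-username/gemma-2b-pdf-grounded"

# "pipeline_tag: text-generation" in the card corresponds to this task name.
generator = pipeline("text-generation", model=repo_id)

out = generator("Quote the relevant passage from the source PDF:", max_new_tokens=64)
print(out[0]["generated_text"])
```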
config.json
CHANGED
@@ -5,7 +5,7 @@
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 2,
-  "dtype": "
+  "dtype": "bfloat16",
   "eos_token_id": 1,
   "head_dim": 256,
   "hidden_act": "gelu",
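The only config change records the checkpoint's storage dtype: the weights are now saved in bfloat16. As a hedged sketch (the repo id is again a placeholder), passing `torch_dtype="auto"` asks transformers to honor this field instead of upcasting the weights to float32 on load:

```python
from transformers import AutoModelForCausalLM

# Placeholder repo id, as in the README example above.
repo_id = "your-username/gemma-2b-pdf-grounded"

# torch_dtype="auto" loads in the dtype recorded in config.json
# (newer transformers releases also accept dtype="auto").
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="auto")

print(model.dtype)  # expected: torch.bfloat16
```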
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:2b98f194bf36c87de9ecdd88f88d8e3984504485df72bf29b40105eccc250b2d
+size 5012363856
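The weights themselves live in Git LFS; only this pointer sits in the git tree, carrying the blob's SHA-256 and byte size. Those two values are enough to verify a downloaded copy with the standard library alone; the local path below is an assumption about where the file was saved:

```python
import hashlib
from pathlib import Path

# Assumed local path; adjust to wherever the file was downloaded.
path = Path("model.safetensors")

# Values taken from the LFS pointer above.
expected_oid = "2b98f194bf36c87de9ecdd88f88d8e3984504485df72bf29b40105eccc250b2d"
expected_size = 5_012_363_856

assert path.stat().st_size == expected_size, "size mismatch"

sha = hashlib.sha256()
with path.open("rb") as f:
    # Hash in 1 MiB chunks so the ~5 GB file never sits in memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("model.safetensors matches its LFS pointer")
```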