engineering-lamini committed
Commit fe584da
1 Parent(s): 4077058

Upload LlamaForCausalLM

Files changed (3):
  1. config.json +1 -1
  2. generation_config.json +3 -3
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "lamini/tiny-random-llama",
   "architectures": [
-    "LlamaModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
generation_config.json CHANGED
@@ -2,6 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 0,
   "eos_token_id": 1,
-  "pad_token_id": -1,
-  "transformers_version": "4.38.0.dev0"
-}
+  "pad_token_id": 0,
+  "transformers_version": "4.45.2"
+}
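The old `pad_token_id` of -1 is not a valid vocabulary index, so batched `generate()` calls that need padding could not use it; the new value 0 reuses the BOS id for padding. A small sketch mirroring the updated values, assuming only the `transformers` `GenerationConfig` class:

```python
from transformers import GenerationConfig

# Mirror the updated generation_config.json; the previous pad_token_id of -1
# is not a real token id, while 0 reuses bos_token_id as the padding token.
gen_config = GenerationConfig(bos_token_id=0, eos_token_id=1, pad_token_id=0)
print(gen_config.pad_token_id)  # 0
```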
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a18924305e4d11ccd1b6b17012d593f7a0cc0b984a3f8b21a6cc423849544570
-size 35265536
+oid sha256:c68c184ea11eb5df789f09fec38bc062a370ff9f0af5cef527d21b1de137691d
+size 68033744
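For reference, the LFS pointer shows the weights growing from 35,265,536 bytes to 68,033,744 bytes (roughly 1.93×). A plausible explanation, not confirmed by the diff itself, is that the re-export as LlamaForCausalLM stores an untied lm_head alongside the backbone weights.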