michaelbenayoun HF staff committed on
Commit
2e0b3c6
1 Parent(s): f533cf0

Upload LlamaForCausalLM

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "_name_or_path": "michaelbenayoun/llama-2-tiny-16layers-random",
3
  "architectures": [
4
- "LlamaModel"
5
  ],
6
  "attention_bias": false,
7
  "bos_token_id": 1,
 
1
  {
2
  "_name_or_path": "michaelbenayoun/llama-2-tiny-16layers-random",
3
  "architectures": [
4
+ "LlamaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "bos_token_id": 1,
generation_config.json CHANGED
@@ -3,5 +3,5 @@
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
  "pad_token_id": 0,
6
- "transformers_version": "4.35.0.dev0"
7
  }
 
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
  "pad_token_id": 0,
6
+ "transformers_version": "4.35.0"
7
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a69884da893fe5f8c6dd79a0854c4916dfd322a04e6a363a44e11754b370e7da
3
- size 4573472
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2f66be336da4f3c29fc1ac463800891d8194e701b016cac340240e43a4b007c
3
+ size 8670432