Debasishiarcs committed
Commit 154cd2f
1 Parent(s): 72703b4

Upload LlamaForCausalLM

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "abhishek/llama-2-7b-hf-small-shards",
+  "_name_or_path": "NousResearch/Llama-2-7b-chat-hf",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -10,7 +10,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 11008,
-  "max_position_embeddings": 2048,
+  "max_position_embeddings": 4096,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6c770f4dc6c64c8b05f379a19ddad1fbe7d9b38813838bb9ac1fdf3885edf7c8
+oid sha256:b6249d46e165c409a7c81ef5dc0681b2acaffd8bb9acc6702dc250e8675894eb
 size 4938985248
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8187e2bd6ea589b7adb96a9444bd5bbfc39947dd4b3b0ef18b5d95071451d6c
+oid sha256:9922b157451b81c87953d80cbd04fce49921b68359b9477f83d14c12ff00a601
 size 4947390768
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:57bed91215364d88d41b3df2ce826cc567ee571572b40c25246cf6818b6a7698
+oid sha256:04717de51c1f65896cd6b7b1c623143b44bd3a8e1c6c01b2d2f772d181a24924
 size 3590488736
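
The safetensors entries above are Git LFS pointer files: the repository tracks only each shard's sha256 oid and byte size, while the weights themselves live in LFS storage. A standalone sketch, using only the Python standard library, of checking a downloaded shard against the new oid; the local path is hypothetical and should point at wherever the shard was saved.

# Sketch only: verify a local shard against the "oid sha256:..." value from its LFS pointer.
import hashlib

EXPECTED = "b6249d46e165c409a7c81ef5dc0681b2acaffd8bb9acc6702dc250e8675894eb"
PATH = "model-00001-of-00003.safetensors"  # hypothetical local path

sha = hashlib.sha256()
with open(PATH, "rb") as f:
    # Hash in chunks so a multi-gigabyte shard is never loaded into memory at once.
    for chunk in iter(lambda: f.read(8 * 1024 * 1024), b""):
        sha.update(chunk)

print(sha.hexdigest() == EXPECTED)  # True if the file matches the updated pointer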