lmzheng committed
Commit: 86e5237
Parent: 043a612

Upload LlamaForCausalLM

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "vicuna-13b-v1.5-16k",
+  "_name_or_path": "vicuna-13b-v1.5-16k/",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -9,8 +9,8 @@
   "hidden_size": 5120,
   "initializer_range": 0.02,
   "intermediate_size": 13824,
-  "max_sequence_length": 16384,
   "max_position_embeddings": 4096,
+  "max_sequence_length": 16384,
   "model_type": "llama",
   "num_attention_heads": 40,
   "num_hidden_layers": 40,
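In the updated config.json, "_name_or_path" gains a trailing slash and "max_sequence_length" moves after "max_position_embeddings"; since JSON key order is not significant, the reordering does not change the parsed configuration. A minimal Python sketch to inspect the result, assuming the updated file has been downloaded locally; the path below is a placeholder:

    import json

    # Parse the updated config.json and print the fields touched by this commit.
    # JSON objects are order-insensitive, so moving "max_sequence_length" after
    # "max_position_embeddings" leaves the parsed configuration unchanged.
    with open("vicuna-13b-v1.5-16k/config.json") as f:
        cfg = json.load(f)

    print(cfg["_name_or_path"])            # "vicuna-13b-v1.5-16k/"
    print(cfg["max_position_embeddings"])  # 4096
    print(cfg["max_sequence_length"])      # 16384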
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0a39fa63651994f2cc20fa06710cb2158b1b557aef329fcd04fede9f899c312c
+oid sha256:f085d68ba57e9b31993876e6c5d426dded74e1c2351b72c585775c625037ff4f
 size 9948728430
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9bdea9603ac2c08f84d4e842a40269b9cc0c027cee6914230ea4e44aed6dbe4c
+oid sha256:d26a32b912d441407fafe52d5bf3f15b80cec145a76e276e4a1f979f5a82a8ba
 size 9904165024
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b5d9be7cbca6fcbf6b6adeec8b11dcc14a20cdb60748c9f36e80a5acb2bb5241
+oid sha256:0ab06bbf15c8c0420fbc6a89b1ef0e6188845f27af63f0ebf87e36ccbf69e034
 size 6178983625
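Each weight shard above is stored as a Git LFS pointer: the oid line is the SHA-256 of the full file and the size line is its length in bytes. A minimal sketch for verifying a downloaded shard against its pointer, using the new oid and size of pytorch_model-00003-of-00003.bin as the example; the local path is a placeholder:

    import hashlib
    import os

    # Expected values copied from the new LFS pointer for
    # pytorch_model-00003-of-00003.bin; adjust the path and values per shard.
    path = "vicuna-13b-v1.5-16k/pytorch_model-00003-of-00003.bin"
    expected_oid = "0ab06bbf15c8c0420fbc6a89b1ef0e6188845f27af63f0ebf87e36ccbf69e034"
    expected_size = 6178983625

    # Hash the file in 1 MiB chunks to avoid loading several GB into memory.
    sha = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)

    assert os.path.getsize(path) == expected_size, "size mismatch"
    assert sha.hexdigest() == expected_oid, "sha256 mismatch"
    print("shard matches its LFS pointer")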