Esmeetu committed
Commit 647e99a · 1 Parent(s): e3460f2

Update config.json


I think `max_position_embeddings` should be 4096 here. Reference: https://huggingface.co/docs/transformers/v4.35.2/en/model_doc/llama#transformers.LlamaConfig.rope_scaling
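For context, with linear RoPE scaling the linked LlamaConfig docs treat `max_position_embeddings` as the original, pre-scaling context length, and the effective window is that value times `rope_scaling["factor"]`. A minimal sketch below, assuming a hypothetical scaling factor of 4.0 (not shown in this diff), illustrates how a 4096-token base window would scale out to 16384:

```python
from transformers import LlamaConfig

# Minimal sketch, not this repo's exact config: with linear RoPE scaling,
# `max_position_embeddings` holds the original (pre-scaling) context length,
# and the effective window is that value times rope_scaling["factor"].
# The 4.0 factor below is an assumption for illustration only.
config = LlamaConfig(
    hidden_size=7168,
    intermediate_size=19200,
    num_attention_heads=56,
    num_hidden_layers=62,
    max_position_embeddings=4096,                    # original (pre-scaling) window
    rope_scaling={"type": "linear", "factor": 4.0},  # assumed scaling factor
)

effective_window = int(config.max_position_embeddings * config.rope_scaling["factor"])
print(effective_window)  # 16384
```

If the repo's actual `rope_scaling` factor differs, the scaled window changes accordingly; the point is only that `max_position_embeddings` should hold the pre-scaling length rather than the already-scaled 16384.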

Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -8,7 +8,7 @@
  "hidden_size": 7168,
  "initializer_range": 0.02,
  "intermediate_size": 19200,
- "max_position_embeddings": 16384,
+ "max_position_embeddings": 4096,
  "model_type": "llama",
  "num_attention_heads": 56,
  "num_hidden_layers": 62,