update model_max_length to 2048

#11
by HectorL - opened
Files changed (1)
  1. tokenizer_config.json +1 -1
tokenizer_config.json CHANGED
@@ -2,7 +2,7 @@
  "add_prefix_space": false,
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
- "model_max_length": 1000000000000000019884624838656,
+ "model_max_length": 2048,
  "name_or_path": "EleutherAI/pythia-6.9b",
  "special_tokens_map_file": "/admin/home-hailey/.cache/huggingface/hub/models--EleutherAI--gpt-neox-20b/snapshots/4e49eadb5d14bd22f314ec3f45b69a87b88c7691/special_tokens_map.json",
  "tokenizer_class": "GPTNeoXTokenizer",