saffr0n committed
Commit 8477c36
1 parent: e25c8ef

Make max input token length longer

Files changed (1):
  1. app.py +1 -1
app.py CHANGED
@@ -9,7 +9,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
 MAX_MAX_NEW_TOKENS = 1024
 DEFAULT_MAX_NEW_TOKENS = 512
-MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "1024"))
+MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
 DESCRIPTION = """\
 # Llama-2 7B Chat
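
For context, here is a minimal sketch of how a limit like `MAX_INPUT_TOKEN_LENGTH` is typically applied in these chat Spaces, assuming the usual tokenize-then-trim pattern; the `trim_input_ids` helper and the model ID are illustrative and not taken verbatim from this repository's app.py.

```python
import os

from transformers import AutoTokenizer

# Same environment-variable pattern as in the diff above.
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

# Illustrative model ID; the Space serves Llama-2 7B Chat.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-chat-hf")


def trim_input_ids(conversation):
    """Tokenize the chat history and keep only the newest tokens.

    `conversation` is a list of {"role": ..., "content": ...} dicts.
    """
    input_ids = tokenizer.apply_chat_template(conversation, return_tensors="pt")
    if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
        # Drop the oldest tokens so the prompt fits within the input budget.
        input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
    return input_ids
```

Raising the default from 1024 to 4096 (Llama-2's context window) means this trimming branch fires far less often, so more of the conversation history reaches the model.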