mdsalem17 committed on
Commit
b411f0c
1 Parent(s): 030993b

Update app.py

Files changed (1)
  1. app.py +3 -3
app.py CHANGED
@@ -11,7 +11,7 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer
 
 DESCRIPTION = "# Claire Chat"
 
-MAX_MAX_NEW_TOKENS = 2048
+MAX_MAX_NEW_TOKENS = 256
 DEFAULT_MAX_NEW_TOKENS = 1024
 MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))
 
@@ -25,10 +25,10 @@ def generate(
     message: str,
     chat_history: list[tuple[str, str]],
     system_prompt: str = "",
-    max_new_tokens: int = 1024,
+    max_new_tokens: int = 256,
     temperature: float = 0.7,
     top_p: float = 0.95,
-    top_k: int = 50,
+    top_k: int = 10,
     repetition_penalty: float = 1.0,
 ) -> Iterator[str]:
     conversation = []
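
For context, here is a minimal sketch of how these values are typically wired into this kind of streaming chat app. The checkpoint name, prompt format, and history flattening below are assumptions for illustration; only the constants and the generate() signature come from the diff. Note that the unchanged DEFAULT_MAX_NEW_TOKENS (1024) now exceeds the new MAX_MAX_NEW_TOKENS cap (256), so the effective limit depends on whatever clamping the UI or the generate call applies.

```python
import os
from threading import Thread
from typing import Iterator

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

# Caps after this commit: at most 256 new tokens per response.
MAX_MAX_NEW_TOKENS = 256
DEFAULT_MAX_NEW_TOKENS = 1024
MAX_INPUT_TOKEN_LENGTH = int(os.getenv("MAX_INPUT_TOKEN_LENGTH", "4096"))

# Assumed checkpoint for illustration; the actual model id is set elsewhere in app.py.
model_id = "OpenLLM-France/Claire-7B-0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16, device_map="auto")


def generate(
    message: str,
    chat_history: list[tuple[str, str]],
    system_prompt: str = "",
    max_new_tokens: int = 256,
    temperature: float = 0.7,
    top_p: float = 0.95,
    top_k: int = 10,
    repetition_penalty: float = 1.0,
) -> Iterator[str]:
    # Flatten the chat history into one prompt string (format is an assumption).
    parts = [system_prompt] if system_prompt else []
    for user, assistant in chat_history:
        parts += [f"User: {user}", f"Assistant: {assistant}"]
    parts += [f"User: {message}", "Assistant:"]
    prompt = "\n".join(parts)

    input_ids = tokenizer(prompt, return_tensors="pt").input_ids
    # Keep only the most recent tokens if the prompt exceeds the input budget.
    if input_ids.shape[1] > MAX_INPUT_TOKEN_LENGTH:
        input_ids = input_ids[:, -MAX_INPUT_TOKEN_LENGTH:]
    input_ids = input_ids.to(model.device)

    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        input_ids=input_ids,
        streamer=streamer,
        max_new_tokens=min(max_new_tokens, MAX_MAX_NEW_TOKENS),  # respect the new cap
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        repetition_penalty=repetition_penalty,
    )
    # Run generation in a background thread and yield partial output as it streams in.
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    outputs = []
    for text in streamer:
        outputs.append(text)
        yield "".join(outputs)
```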