charanhu committed
Commit 743e5f2
1 Parent(s): fd57e7f

Update app.py

Files changed (1)
  1. app.py +12 -2
app.py CHANGED
@@ -15,7 +15,7 @@ def format_prompt(message, history):
     return prompt
 
 def generate(
-    prompt, history, system_prompt, temperature=0.9, max_new_tokens=256, top_p=0.95, repetition_penalty=1.0,
+    prompt, history, system_prompt, temperature=0.9, max_new_tokens=4096, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -24,6 +24,7 @@ def generate(
 
     generate_kwargs = dict(
         temperature=temperature,
+        min_new_tokens=min_new_tokens,
         max_new_tokens=max_new_tokens,
         top_p=top_p,
         repetition_penalty=repetition_penalty,
@@ -56,11 +57,20 @@ additional_inputs=[
         interactive=True,
         info="Higher values produce more diverse outputs",
     ),
+    gr.Slider(
+        label="Min new tokens",
+        value=0,
+        minimum=0,
+        maximum=32000,
+        step=64,
+        interactive=True,
+        info="The minimum numbers of new tokens",
+    ),
     gr.Slider(
         label="Max new tokens",
         value=256,
         minimum=0,
-        maximum=30000,
+        maximum=32000,
         step=64,
         interactive=True,
         info="The maximum numbers of new tokens",