lewtun HF staff committed on
Commit
97f74a7
1 Parent(s): cdff18d

Cache examples

Browse files
Files changed (1) hide show
  1. app.py +15 -2
app.py CHANGED
@@ -66,7 +66,8 @@ def generate(instruction, temperature, max_new_tokens, top_p, length_penalty):
66
  top_p=top_p,
67
  temperature=temperature,
68
  max_new_tokens=max_new_tokens,
69
- early_stopping=True,
 
70
  length_penalty=length_penalty,
71
  eos_token_id=tokenizer.eos_token_id,
72
  pad_token_id=tokenizer.eos_token_id,
@@ -97,6 +98,12 @@ examples = [
97
  ]
98
 
99
 
 
 
 
 
 
 
100
  with gr.Blocks(theme=theme) as demo:
101
  with gr.Column():
102
  gr.Markdown(
@@ -120,7 +127,13 @@ with gr.Blocks(theme=theme) as demo:
120
  # placeholder="Here will be the answer to your question",
121
  # )
122
  submit = gr.Button("Generate", variant="primary")
123
- gr.Examples(examples=examples, inputs=[instruction])
 
 
 
 
 
 
124
 
125
  with gr.Column(scale=1):
126
  temperature = gr.Slider(
 
66
  top_p=top_p,
67
  temperature=temperature,
68
  max_new_tokens=max_new_tokens,
69
+ # early_stopping=True, # Not sure if we want this
70
+ top_k=0, # Maybe set top_k=40 if results are bad
71
  length_penalty=length_penalty,
72
  eos_token_id=tokenizer.eos_token_id,
73
  pad_token_id=tokenizer.eos_token_id,
 
98
  ]
99
 
100
 
101
+ def process_example(args):
102
+ for x in generate(args):
103
+ pass
104
+ return x
105
+
106
+
107
  with gr.Blocks(theme=theme) as demo:
108
  with gr.Column():
109
  gr.Markdown(
 
127
  # placeholder="Here will be the answer to your question",
128
  # )
129
  submit = gr.Button("Generate", variant="primary")
130
+ gr.Examples(
131
+ examples=examples,
132
+ inputs=[instruction],
133
+ cache_examples=True,
134
+ fn=process_example,
135
+ outputs=[output],
136
+ )
137
 
138
  with gr.Column(scale=1):
139
  temperature = gr.Slider(