Update app.py
app.py
CHANGED
@@ -32,9 +32,9 @@ def run_generation(
     temperature,
     top_k,
     max_new_tokens,
-    no_repeat_ngram_size=4,
-    length_penalty=1.0,
     repetition_penalty=1.1,
+    length_penalty=1.0,
+    no_repeat_ngram_size=4,
     use_generation_config=False,
 ):
     st = time.perf_counter()
@@ -55,6 +55,8 @@ def run_generation(
         top_p=top_p,
         temperature=float(temperature),
         top_k=top_k,
+        repetition_penalty=repetition_penalty,
+        length_penalty=length_penalty,
         no_repeat_ngram_size=no_repeat_ngram_size,
     )
     t = Thread(target=model.generate, kwargs=generate_kwargs)
@@ -90,7 +92,7 @@ with gr.Blocks() as demo:
     with gr.Row():
         with gr.Column(scale=4):
             user_text = gr.Textbox(
-
+                value="Can you write an email to Jerome letting him know that thy're taking the hobbits to Isengard?",
                 label="User input",
             )
             model_output = gr.Textbox(label="Model output", lines=10, interactive=False)
@@ -121,23 +123,38 @@ with gr.Blocks() as demo:
                 interactive=True,
                 label="Top-k",
             )
-            temperature = gr.Slider(
-                minimum=0.1,
-                maximum=5.0,
-                value=0.8,
+            repetition_penalty = gr.Slider(
+                minimum=0.9,
+                maximum=2.5,
+                value=1.1,
                 step=0.1,
                 interactive=True,
-                label="Temperature",
+                label="Repetition Penalty",
             )
-
+            length_penalty = gr.Slider(
+                minimum=0.8,
+                maximum=1.5,
+                value=1.0,
+                step=0.1,
+                interactive=True,
+                label="Length Penalty",
+            )
+            # temperature = gr.Slider(
+            #     minimum=0.1,
+            #     maximum=5.0,
+            #     value=0.8,
+            #     step=0.1,
+            #     interactive=True,
+            #     label="Temperature",
+            # )
     user_text.submit(
         run_generation,
-        [user_text, top_p, temperature, top_k, max_new_tokens],
+        [user_text, top_p, temperature, top_k, max_new_tokens, repetition_penalty, length_penalty],
         model_output,
     )
     button_submit.click(
         run_generation,
-        [user_text, top_p, temperature, top_k, max_new_tokens],
+        [user_text, top_p, temperature, top_k, max_new_tokens, repetition_penalty, length_penalty],
         model_output,
     )

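Since the diff only shows fragments of app.py, here is a minimal, self-contained sketch of the pattern this commit extends: the Gradio inputs list is positional, so its order must match run_generation's signature, and the two new sliders are simply appended in both places and forwarded to model.generate. Only the parameter names and the new sliders' ranges and defaults come from the diff; the checkpoint, streamer wiring, layout, and the ranges of the pre-existing sliders are illustrative assumptions.

```python
# Minimal sketch of the pattern this commit extends (not the Space's full app.py).
# The checkpoint, streamer wiring, layout, and pre-existing slider ranges are
# illustrative assumptions; parameter names and the new sliders' ranges/defaults
# mirror the diff.
import time
from threading import Thread

import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_ID = "gpt2"  # placeholder checkpoint
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)


def run_generation(
    user_text,
    top_p,
    temperature,
    top_k,
    max_new_tokens,
    repetition_penalty=1.1,
    length_penalty=1.0,
    no_repeat_ngram_size=4,
):
    st = time.perf_counter()
    inputs = tokenizer(user_text, return_tensors="pt")
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        **inputs,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        top_p=top_p,
        temperature=float(temperature),
        top_k=top_k,
        repetition_penalty=repetition_penalty,
        length_penalty=length_penalty,  # only affects beam-based decoding
        no_repeat_ngram_size=no_repeat_ngram_size,
        pad_token_id=tokenizer.eos_token_id,
    )
    # generate() runs in a background thread; the streamer yields text as it is produced
    t = Thread(target=model.generate, kwargs=generate_kwargs)
    t.start()
    text = ""
    for chunk in streamer:
        text += chunk
        yield text
    print(f"Generation took {time.perf_counter() - st:.2f}s")


with gr.Blocks() as demo:
    user_text = gr.Textbox(label="User input")
    model_output = gr.Textbox(label="Model output", lines=10, interactive=False)
    button_submit = gr.Button("Submit")

    top_p = gr.Slider(0.05, 1.0, value=0.95, step=0.05, label="Top-p")
    temperature = gr.Slider(0.1, 5.0, value=0.8, step=0.1, label="Temperature")
    top_k = gr.Slider(1, 100, value=50, step=1, label="Top-k")
    max_new_tokens = gr.Slider(16, 512, value=128, step=16, label="Max new tokens")
    repetition_penalty = gr.Slider(0.9, 2.5, value=1.1, step=0.1, label="Repetition Penalty")
    length_penalty = gr.Slider(0.8, 1.5, value=1.0, step=0.1, label="Length Penalty")

    # The inputs list is positional: its order must line up with run_generation's
    # signature, which is why the two new sliders are appended at the end here,
    # matching the diff's .submit()/.click() calls.
    generation_inputs = [
        user_text,
        top_p,
        temperature,
        top_k,
        max_new_tokens,
        repetition_penalty,
        length_penalty,
    ]
    user_text.submit(run_generation, generation_inputs, model_output)
    button_submit.click(run_generation, generation_inputs, model_output)

demo.launch()
```

One caveat: in transformers, length_penalty only influences beam-based decoding, so with plain sampling (num_beams=1) the new Length Penalty slider has no visible effect unless the Space also enables beam search.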