VictorSanh committed • Commit 3464b1e
1 Parent(s): b519a84
update
app_dialogue.py CHANGED (+4 -3)
@@ -34,7 +34,8 @@ SYSTEM_PROMPT = [
 """The following is a conversation between a highly knowledgeable and intelligent visual AI assistant, called Assistant, and a human user, called User.
 In the following interactions, User and Assistant will converse in natural language, and Assistant will answer in a sassy way.
 Assistant's main purpose is to create memes text from the images User provies.
-Assistant should
+Assistant should be funny, sassy, mouthy, and aloof and sometimes Assistant roasts people.
+Assistant should not be mean. It should not say toxic, homophobic, sexist, racist or demeaning things that can make people uncomfortable.
 Assistant was created by Hugging Face.
 Assistant does not repeat itself.
 
@@ -443,7 +444,7 @@ with gr.Blocks(title="D", theme=gr.themes.Base()) as demo:
         "Greedy",
         "Top P Sampling",
     ],
-    value="
+    value="Top P Sampling",
     label="Decoding strategy",
     interactive=True,
     info="Higher values is equivalent to sampling more low-probability tokens.",
@@ -451,7 +452,7 @@ with gr.Blocks(title="D", theme=gr.themes.Base()) as demo:
 temperature = gr.Slider(
     minimum=0.0,
     maximum=5.0,
-    value=0.
+    value=0.6,
     step=0.1,
     interactive=True,
     visible=False,
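For context, a minimal standalone sketch of the two generation-parameter widgets this commit changes, with the new defaults ("Top P Sampling" and 0.6). Only gr.Blocks, gr.Slider and the keyword arguments shown in the diff appear in the original; the gr.Radio component type, the variable names, and the launch call are assumptions for a runnable example.

import gradio as gr

with gr.Blocks(title="D", theme=gr.themes.Base()) as demo:
    # Decoding strategy picker; this commit sets its default to "Top P Sampling".
    decoding_strategy = gr.Radio(  # component type assumed; only its kwargs appear in the diff
        choices=[
            "Greedy",
            "Top P Sampling",
        ],
        value="Top P Sampling",
        label="Decoding strategy",
        interactive=True,
        info="Higher values is equivalent to sampling more low-probability tokens.",
    )
    # Temperature slider; this commit sets its default to 0.6. It is created
    # hidden (visible=False), presumably shown only when a sampling strategy
    # is selected.
    temperature = gr.Slider(
        minimum=0.0,
        maximum=5.0,
        value=0.6,
        step=0.1,
        interactive=True,
        visible=False,
    )

demo.launch()  # illustrative; the real app wires these values into its generation call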