use gr.on for more components
app_dialogue.py +8 -69
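In Gradio, `gr.on` attaches a single handler to several component events at once, which is what this commit leans on instead of registering the same callback trigger by trigger. A minimal standalone sketch of the pattern, with a hypothetical `echo` handler and components that are not the ones in app_dialogue.py:

import gradio as gr

def echo(prompt):
    # Hypothetical handler shared by every trigger listed below.
    return prompt

with gr.Blocks() as demo:
    textbox = gr.Textbox(label="Prompt")
    submit_btn = gr.Button("Submit")
    output = gr.Textbox(label="Echo")

    # One gr.on call replaces separate textbox.submit(...) and
    # submit_btn.click(...) registrations of the same handler.
    gr.on(
        triggers=[textbox.submit, submit_btn.click],
        fn=echo,
        inputs=[textbox],
        outputs=[output],
        queue=False,
    )

demo.launch()

The diff below applies the same idea to expand_layout, firing it from the textbox, the image upload, the submit button, the template gallery selection, and the regenerate button through one registration.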
app_dialogue.py CHANGED
@@ -842,19 +842,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
         generation_args["do_sample"] = True
         generation_args["top_p"] = top_p
 
-
-            # Case where there is no image OR the image is passed as `<fake_token_around_image><image:IMAGE_URL><fake_token_around_image>`
-            chat_history.append([prompt_list_to_markdown(user_prompt_list), ""])
-        else:
-            # Case where the image is passed through the Image Box.
-            # Convert the image into base64 for both passing it through the chat history and
-            # displaying the image inside the same bubble as the text.
-            chat_history.append(
-                [
-                    f"{prompt_list_to_markdown([image] + user_prompt_list)}",
-                    "",
-                ]
-            )
+        chat_history.append([prompt_list_to_markdown(user_prompt_list), ""])
 
         query = prompt_list_to_tgi_input(formated_prompt_list)
         all_meme_images = []
@@ -881,7 +869,13 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
         return user_prompt_str, all_meme_images, chat_history
 
     gr.on(
-        triggers=[
+        triggers=[
+            textbox.submit,
+            imagebox.upload,
+            submit_btn.click,
+            template_gallery.select,
+            regenerate_btn.click,
+        ],
         fn=expand_layout,
         outputs=[upload_area, generated_memes_gallery],
         queue=False,
@@ -912,7 +906,6 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
             return chat_history, "", ""
         last_interaction = chat_history[-1]
        chat_history = chat_history[:-1]
-        last_interaction[0] = re.sub(r"!\[]\(/file=.*?\)", "", last_interaction[0])
         chat_update = chat_history
         text_update = last_interaction[0]
         return chat_update, text_update, ""
@@ -922,36 +915,6 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
         inputs=chatbot,
         outputs=[chatbot, textbox, generated_memes_gallery],
         queue=False,
-    ).then(
-        fn=lambda: regenerate_btn.update(interactive=False),
-        inputs=[],
-        outputs=[],
-    ).then(
-        fn=model_inference,
-        inputs=[
-            model_selector,
-            system_prompt,
-            textbox,
-            chatbot,
-            imagebox,
-            decoding_strategy,
-            temperature,
-            max_new_tokens,
-            repetition_penalty,
-            top_p,
-            all_caps_meme_text,
-            text_at_the_top,
-            font_meme_text,
-        ],
-        outputs=[
-            textbox,
-            generated_memes_gallery,
-            chatbot,
-        ],
-    ).then(
-        fn=lambda: regenerate_btn.update(interactive=True),
-        inputs=[],
-        outputs=[],
     )
 
     gallery_type_choice.change(
@@ -965,30 +928,6 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
         inputs=[template_gallery],
         outputs=[textbox, imagebox, generated_memes_gallery],
         queue=False,
-    ).success(
-        fn=expand_layout, outputs=[upload_area, generated_memes_gallery], queue=False
-    ).success(
-        fn=model_inference,
-        inputs=[
-            model_selector,
-            system_prompt,
-            textbox,
-            chatbot,
-            imagebox,
-            decoding_strategy,
-            temperature,
-            max_new_tokens,
-            repetition_penalty,
-            top_p,
-            all_caps_meme_text,
-            text_at_the_top,
-            font_meme_text,
-        ],
-        outputs=[
-            textbox,
-            generated_memes_gallery,
-            chatbot,
-        ],
     )
     demo.load(
         fn=choose_gallery,