Commit ce8f63e by multimodalart (parent: 1ce5a2b)

dynamic layout

Files changed (1):
  1. app_dialogue.py +109 -134
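
The "dynamic layout" of the commit title works by wiring the input column and the results gallery to a small callback that resizes them as soon as generation starts, while a CSS transition animates the change. Below is a minimal, self-contained sketch of that pattern, assuming a Gradio release that supports gr.on and component-constructor updates (which the new code relies on); component names and sizes here are illustrative, not the app's own:

import gradio as gr

# Animate width/flex changes so the resize looks smooth (same rule as the diff's CSS).
css = "*{transition: width 0.5s ease, flex-grow 0.5s ease}"

def expand_layout():
    # Returning fresh constructors updates the live components' properties:
    # shrink the input column, grow the results gallery.
    return gr.Column(scale=2), gr.Gallery(height=682)

with gr.Blocks(css=css) as demo:
    with gr.Row():
        with gr.Column(scale=4) as input_col:  # starts wide
            prompt = gr.Textbox(label="Prompt")
            submit = gr.Button("Submit")
        with gr.Column(scale=5):
            results = gr.Gallery(height=428)  # starts short
    # One listener bound to several triggers; resize first, then chain the real work with .then(...).
    gr.on(
        triggers=[prompt.submit, submit.click],
        fn=expand_layout,
        outputs=[input_col, results],
        queue=False,
    )

if __name__ == "__main__":
    demo.launch()
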
app_dialogue.py CHANGED
@@ -62,7 +62,7 @@ BAN_TOKENS = ( # For documentation puporse. We are not using this list, it is h
 EOS_STRINGS = ["<end_of_utterance>", "\nUser:"]
 STOP_SUSPECT_LIST = []
 
-GRADIO_LINK = "https://huggingfacem4-ai-meme-generator.hf.space"
+GRADIO_LINK = "https://multimodalart-ai-meme-generator.hf.space"
 API_TOKEN = os.getenv("HF_AUTH_TOKEN")
 IDEFICS_LOGO = "https://huggingface.co/spaces/HuggingFaceM4/idefics_playground/resolve/main/IDEFICS_logo.png"
 
@@ -152,7 +152,7 @@ for image_dir in os.listdir("example_images"):
         tmp_filename = H.preprocess(H.value)
         DEFAULT_IMAGES_TMP_PATH_TO_URL[
             tmp_filename
-        ] = f"https://huggingface.co/spaces/HuggingFaceM4/AI_Meme_Generator/resolve/main/example_images/{image_dir}/{im_path}"
+        ] = f"https://huggingface.co/spaces/multimodalart/AI_Meme_Generator/resolve/main/example_images/{image_dir}/{im_path}"
 
 
 # Utils to handle the image markdown display logic
@@ -356,41 +356,6 @@ def resize_with_ratio(image: PIL.Image.Image, fixed_width: int) -> PIL.Image.Ima
     return resized_img
 
 
-def make_new_lines(draw, image, font, text_is_too_long, lines, num_lines, num_loops):
-    max_len_increment = 0
-    while text_is_too_long and max_len_increment < 10:
-        new_lines = lines.copy()
-        last_line_with_backslash = insert_backslash(
-            new_lines[-1],
-            max_length=(len(new_lines[-1]) + max_len_increment)
-            // (num_lines - num_loops),
-        )
-        penultimate_line, last_line = (
-            last_line_with_backslash.split("\n")[0],
-            last_line_with_backslash.split("\n")[1],
-        )
-        new_lines.pop(-1)
-        new_lines.append(penultimate_line)
-        new_lines.append(last_line)
-        # If the we haven't reached the last line, we split it again
-        if len(new_lines) < num_lines:
-            new_lines, text_width, text_is_too_long = make_new_lines(
-                draw=draw,
-                image=image,
-                font=font,
-                text_is_too_long=text_is_too_long,
-                lines=new_lines,
-                num_lines=num_lines,
-                num_loops=num_loops + 1,
-            )
-        text_width = max([draw.textlength(line, font) for line in new_lines])
-        text_is_too_long = text_width > image.width
-        max_len_increment += 1
-        if not text_is_too_long:
-            lines = new_lines
-    return lines, text_width, text_is_too_long
-
-
 def test_font_size(
     draw,
     image,
@@ -403,25 +368,75 @@ def test_font_size(
 ):
     text_width = draw.textlength(text, font)
     text_is_too_long = True
-    lines = [text]
-    while font.size > min_font and text_is_too_long:
-        font = ImageFont.truetype(
-            f"fonts/{font_meme_text}.ttf", size=font.size - font_size_reduction
-        )
-        if num_lines == 1:
+
+    if num_lines == 1:
+        while font.size > min_font and text_is_too_long:
+            font = ImageFont.truetype(
+                f"fonts/{font_meme_text}.ttf", size=font.size - font_size_reduction
+            )
             text_width = draw.textlength(text, font)
             text_is_too_long = text_width > image.width
-        else:
-            lines, text_width, text_is_too_long = make_new_lines(
-                draw=draw,
-                image=image,
-                font=font,
-                text_is_too_long=text_is_too_long,
-                lines=lines,
-                num_lines=num_lines,
-                num_loops=0,
+
+    elif num_lines == 2:
+        while font.size > min_font and text_is_too_long:
+            font = ImageFont.truetype(
+                f"fonts/{font_meme_text}.ttf", size=font.size - font_size_reduction
             )
-        temp_text = "\n".join(lines)
+            max_len_increment = 0
+            while (
+                text_is_too_long
+                and max_len_increment < 10
+                and max_len_increment < (len(text)) // 2
+            ):
+                temp_text = insert_backslash(
+                    text, max_length=(len(text) + max_len_increment) // 2
+                )
+                first_line, second_line = (
+                    temp_text.split("\n")[0],
+                    temp_text.split("\n")[1],
+                )
+                text_width = max(
+                    draw.textlength(first_line, font),
+                    draw.textlength(second_line, font),
+                )
+                text_is_too_long = text_width > image.width
+                max_len_increment += 1
+
+    elif num_lines == 3:
+        while font.size > min_font and text_is_too_long:
+            font = ImageFont.truetype(
+                f"fonts/{font_meme_text}.ttf", size=font.size - font_size_reduction
+            )
+            max_len_incr_1_split = 0
+            while text_is_too_long and max_len_incr_1_split < 10:
+                first_temp_text = insert_backslash(
+                    text, max_length=(len(text) + max_len_incr_1_split) // 3
+                )
+                first_line, second_line = (
+                    first_temp_text.split("\n")[0],
+                    first_temp_text.split("\n")[1],
+                )
+                max_len_incr_2_split = 0
+                while text_is_too_long and max_len_incr_2_split < 10:
+                    temp_text_second_line = insert_backslash(
+                        second_line,
+                        max_length=(len(second_line) + max_len_incr_2_split) // 2,
+                    )
+                    second_line_1, second_line_2 = (
+                        temp_text_second_line.split("\n")[0],
+                        temp_text_second_line.split("\n")[1],
+                    )
+                    temp_text = first_line + "\n" + second_line_1 + "\n" + second_line_2
+                    text_width = max(
+                        draw.textlength(first_line, font),
+                        draw.textlength(second_line_1, font),
+                        draw.textlength(second_line_2, font),
+                    )
+                    text_is_too_long = text_width > image.width
+                    max_len_incr_2_split += 1
+                max_len_incr_1_split += 1
+    else:
+        raise (ValueError("num_lines can only be 1, 2 or 3"))
 
     if not text_is_too_long and num_lines > 1:
         text = temp_text
@@ -456,7 +471,7 @@ def make_meme_image(
     initial_font_size = 80
     text_is_too_long = True
     num_lines = 0
-    while text_is_too_long and num_lines < 8:
+    while text_is_too_long and num_lines < 3:
        num_lines += 1
        font = ImageFont.truetype(f"fonts/{font_meme_text}.ttf", size=initial_font_size)
        text, font, text_width, text_is_too_long = test_font_size(
@@ -479,7 +494,7 @@ def make_meme_image(
 
     outline_width = 2
     text_x = (image_width - text_width) / 2
-    text_y = image_height - num_lines * font.size - 10 - 2 * num_lines
+    text_y = image_height - num_lines * font.size - 10 - num_lines
     if text_at_the_top:
         text_y = 0
 
@@ -548,9 +563,8 @@ def format_user_prompt_with_im_history_and_system_conditioning(
 
     return resulting_list, current_user_prompt_list
 
-
-# dope_callback = gr.CSVLogger()
-# problematic_callback = gr.CSVLogger()
+def expand_layout():
+    return gr.Column(scale=2), gr.Gallery(height=682)
 
 textbox = gr.Textbox(
     placeholder="Upload an image and ask the AI to create a meme!",
@@ -568,16 +582,17 @@ chatbot = gr.Chatbot(
     visible=False,
     avatar_images=[None, BOT_AVATAR],
 )
-css = """
-.gradio-container{max-width: 970px!important}
+css='''
+.gradio-container{max-width: 1000px!important}
 h1{display: flex;align-items: center;justify-content: center;gap: .25em}
-"""
+*{transition: width 0.5s ease, flex-grow 0.5s ease}
+'''
 with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as demo:
     with gr.Row(scale=0.5):
         gr.HTML(
             """<h1 align="center">AI Meme Generator <span style="font-size: 13px;">powered by <a href="https://huggingface.co/blog/idefics">IDEFICS</a></span><img width=40 height=40 src="https://cdn-uploads.huggingface.co/production/uploads/624bebf604abc7ebb01789af/v770xGti5vH1SYLBgyOO_.png" /></h1>"""
         )
-
+
     with gr.Row(elem_id="model_selector_row"):
         model_selector = gr.Dropdown(
             choices=MODELS,
@@ -589,21 +604,22 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
             visible=False,
         )
     with gr.Row(equal_height=True):
-        with gr.Column(equal_height=1):
+        #scale=2 when expanded
+        with gr.Column(scale=4, min_width=250) as upload_area:
             imagebox = gr.Image(
-                type="filepath", label="Image to meme", height=400, visible=True
+                type="filepath", label="Image to meme", height=272, visible=True
             )
             with gr.Group():
                 with gr.Row():
                     textbox.render()
                 with gr.Row():
-                    submit_btn = gr.Button(value="▶️ Submit", visible=True)
+                    submit_btn = gr.Button(value="▶️ Submit", visible=True, min_width=120)
                     clear_btn = gr.ClearButton(
-                        [textbox, imagebox, chatbot], value="🧹 Clear"
+                        [textbox, imagebox, chatbot], value="🧹 Clear", min_width=120
                     )
-                    regenerate_btn = gr.Button(value="🔄 Regenerate", visible=True)
+                    regenerate_btn = gr.Button(value="🔄 Regenerate", visible=True, min_width=120)
                     upload_btn = gr.UploadButton(
-                        "📁 Upload image", file_types=["image"], visible=False
+                        "📁 Upload image", file_types=["image"], visible=False, min_width=120
                     )
             with gr.Accordion(
                 "Advanced settings", open=False, visible=True
@@ -709,7 +725,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
                 inputs=decoding_strategy,
                 outputs=top_p,
             )
-        with gr.Column(scale=2):
+        with gr.Column(scale=5) as result_area:
            generated_memes_gallery = gr.Gallery(
                # value="Images generated will appear here",
                label="IDEFICS Generated Memes",
@@ -717,7 +733,9 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
                elem_id="generated_memes_gallery",
                show_download_button=True,
                show_share_button=True,
-            ).style(columns=[2], object_fit="contain", height=600)
+                columns=[2],
+                object_fit="contain",height=428
+            ) #height 600 when expanded
     with gr.Row(equal_height=True):
         with gr.Box(elem_id="gallery_box"):
             gallery_type_choice = gr.Radio(
@@ -737,7 +755,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
                # value= value given by gallery_type_choice,
                label="Templates Gallery",
                allow_preview=False,
-                columns=[6],
+                columns=6,
                elem_id="gallery",
                show_share_button=False,
                height=400,
@@ -880,61 +898,17 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
                # yield "", None, chat_history
                full_text += acc_text
                acc_text = ""
-
-    textbox.submit(
-        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False
-    ).then(
-        fn=model_inference,
-        inputs=[
-            model_selector,
-            system_prompt,
-            textbox,
-            chatbot,
-            imagebox,
-            decoding_strategy,
-            temperature,
-            max_new_tokens,
-            repetition_penalty,
-            top_p,
-            all_caps_meme_text,
-            text_at_the_top,
-            font_meme_text,
-        ],
-        outputs=[textbox, generated_memes_gallery, chatbot],
-    )
-    imagebox.upload(
-        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False
-    ).then(
-        fn=lambda: "Write a meme about this image.",
-        inputs=[],
-        outputs=[textbox],
-        queue=False,
-    ).then(
-        fn=model_inference,
-        inputs=[
-            model_selector,
-            system_prompt,
-            textbox,
-            chatbot,
-            imagebox,
-            decoding_strategy,
-            temperature,
-            max_new_tokens,
-            repetition_penalty,
-            top_p,
-            all_caps_meme_text,
-            text_at_the_top,
-            font_meme_text,
-        ],
-        outputs=[
-            textbox,
-            generated_memes_gallery,
-            chatbot,
+    gr.on(
+        triggers=[
+            textbox.submit,
+            imagebox.upload,
+            submit_btn.click
         ],
-    )
-    submit_btn.click(
-        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False
+        fn=expand_layout,
+        outputs=[upload_area, generated_memes_gallery],
+        queue=False
     ).then(
+        fn=lambda: "", inputs=[], outputs=[generated_memes_gallery], queue=False).then(
        fn=model_inference,
        inputs=[
            model_selector,
@@ -951,11 +925,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
            text_at_the_top,
            font_meme_text,
        ],
-        outputs=[
-            textbox,
-            generated_memes_gallery,
-            chatbot,
-        ],
+        outputs=[textbox, generated_memes_gallery, chatbot],
    )
 
    def remove_last_turn(chat_history):
@@ -972,6 +942,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
        fn=remove_last_turn,
        inputs=chatbot,
        outputs=[chatbot, textbox, generated_memes_gallery],
+        queue=False
    ).then(
        fn=model_inference,
        inputs=[
@@ -998,13 +969,13 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
 
    upload_btn.upload(add_file, [upload_btn], [imagebox, upload_btn], queue=False)
    submit_btn.click(
-        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn
+        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn, queue=False
    )
    textbox.submit(
-        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn
+        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn, queue=False
    )
    clear_btn.click(
-        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn
+        lambda: gr.update(label="📁 Upload image", interactive=True), [], upload_btn, queue=False
    )
    gallery_type_choice.change(
        fn=choose_gallery,
@@ -1016,6 +987,10 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
        fn=add_file_gallery,
        inputs=[template_gallery],
        outputs=[textbox, imagebox, generated_memes_gallery],
+    ).success(
+        fn=expand_layout,
+        outputs=[upload_area, generated_memes_gallery],
+        queue=False
    ).success(
        fn=model_inference,
        inputs=[
@@ -1040,7 +1015,7 @@ with gr.Blocks(title="AI Meme Generator", theme=gr.themes.Base(), css=css) as de
        ],
    )
    demo.load(
-        fn=choose_gallery, inputs=[gallery_type_choice], outputs=[template_gallery]
+        fn=choose_gallery, inputs=[gallery_type_choice], outputs=[template_gallery], queue=False
    )
    demo.queue(concurrency_count=40, max_size=40)
-    demo.launch()
+    demo.launch()
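
Note on the meme-text fitting change: the recursive make_new_lines helper is dropped in favour of explicit one-, two- and three-line branches in test_font_size, each shrinking the font and nudging the split point until the widest line fits the image width. insert_backslash is defined elsewhere in app_dialogue.py and is not part of this diff; the sketch below uses an assumed stand-in (split_near) to illustrate the two-line case with Pillow:

from PIL import Image, ImageDraw, ImageFont

def split_near(text: str, max_length: int) -> str:
    # Assumed stand-in for the app's insert_backslash: break at the last space
    # before max_length so the text becomes two lines.
    cut = text.rfind(" ", 0, max_length)
    cut = cut if cut > 0 else max_length
    return text[:cut].strip() + "\n" + text[cut:].strip()

def fit_two_lines(draw, image, text, font_path, start_size=80, min_font=20, step=4):
    font = ImageFont.truetype(font_path, size=start_size)
    wrapped, too_long = text, True
    while font.size > min_font and too_long:
        font = ImageFont.truetype(font_path, size=font.size - step)
        # Nudge the split point a few characters past the midpoint until both lines fit.
        for extra in range(10):
            wrapped = split_near(text, (len(text) + extra) // 2)
            lines = wrapped.split("\n")
            too_long = max(draw.textlength(line, font) for line in lines) > image.width
            if not too_long:
                break
    return wrapped, font

# Usage (the font path is illustrative):
# img = Image.new("RGB", (500, 500))
# draw = ImageDraw.Draw(img)
# caption, font = fit_two_lines(draw, img, "WHEN THE MEME FINALLY FITS THE IMAGE", "fonts/impact.ttf")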