ysharma (HF staff) committed
Commit 6934ad9
1 Parent(s): 5b75fd3

Update app_dialogue.py

Files changed (1)
  1. app_dialogue.py +20 -20
app_dialogue.py CHANGED
@@ -554,7 +554,26 @@ def model_generation(
 
     return generated_text
 
-
+    def process_example(message, image):
+        print("********* process_example **********")
+        clear_msg, image_value, chat = model_inference(
+            user_prompt_str=message,
+            chat_history=[],
+            image=image,
+            decoding_strategy="Greedy",
+            num_beams=None,
+            temperature=None,
+            no_repeat_ngram_size=None,
+            max_new_tokens=512,
+            min_length=16,
+            repetition_penalty=None,
+            length_penalty=None,
+            top_k=None,
+            top_p=0.95,
+            penalty_alpha=None,
+        )
+        return clear_msg, image_value, chat
+
     dope_callback = gr.CSVLogger()
     dope_hf_callback = gr.HuggingFaceDatasetSaver(
         hf_token=os.getenv("HF_AUTH_TOKEN"),
@@ -876,25 +895,6 @@ And so, the story of Mulan and Shrek's romance came to an end, leaving a lasting
     print(f"chat_history (ELSE IF image is available) is -{chat_history}")
     return "", None, chat_history
 
-    def process_example(message, image):
-        print("********* process_example **********")
-        clear_msg, image_value, chat = model_inference(
-            user_prompt_str=message,
-            chat_history=[],
-            image=image,
-            decoding_strategy="Greedy",
-            num_beams=None,
-            temperature=None,
-            no_repeat_ngram_size=None,
-            max_new_tokens=512,
-            min_length=16,
-            repetition_penalty=None,
-            length_penalty=None,
-            top_k=None,
-            top_p=0.95,
-            penalty_alpha=None,
-        )
-        return clear_msg, image_value, chat
 
     textbox.submit(
         fn=model_inference,
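
The diff itself only relocates the process_example helper so that it is defined earlier in the file. For reference, here is a minimal, self-contained sketch of how a wrapper like this is commonly wired into a Gradio Blocks app; the gr.Examples usage, the stub model_inference, and the example data are assumptions for illustration and are not the Space's actual code.

import gradio as gr

def model_inference(user_prompt_str, chat_history, image, **generation_kwargs):
    # Stub standing in for the Space's real model_inference (assumption).
    chat_history = chat_history + [(user_prompt_str, "(model reply)")]
    return "", image, chat_history

def process_example(message, image):
    # Wraps model_inference with fixed decoding settings, mirroring the diff.
    clear_msg, image_value, chat = model_inference(
        user_prompt_str=message,
        chat_history=[],
        image=image,
        decoding_strategy="Greedy",
        max_new_tokens=512,
        min_length=16,
        top_p=0.95,
    )
    return clear_msg, image_value, chat

with gr.Blocks() as demo:
    image = gr.Image(type="filepath")
    chatbot = gr.Chatbot()
    textbox = gr.Textbox()

    textbox.submit(
        fn=model_inference,
        inputs=[textbox, chatbot, image],
        outputs=[textbox, image, chatbot],
    )

    gr.Examples(
        examples=[["Describe this image.", None]],
        inputs=[textbox, image],
        outputs=[textbox, image, chatbot],
        fn=process_example,        # must already be defined at this point in the file
        cache_examples=False,      # if True, Gradio calls process_example at startup to pre-compute outputs
    )

if __name__ == "__main__":
    demo.launch()

Because gr.Examples (and any event handler) takes the helper by name at construction time, defining process_example before the UI wiring, as this commit does, keeps the reference valid wherever it is used later in the file.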