Commit 4eeba3f by paralym
Parent: 3e9ca50

Update app.py

Files changed (1):
  1. app.py +33 -7
app.py CHANGED
@@ -54,6 +54,7 @@ from huggingface_hub import HfApi
 from huggingface_hub import login
 from huggingface_hub import revision_exists
 
+
 api = HfApi()
 repo_name = "paralym/pangea_logs"
 
@@ -213,7 +214,7 @@ def add_message(history, message):
 
 
 @spaces.GPU
-def bot(history):
+def bot(history, temperature, top_p, max_output_tokens):
     print("### turn start history",history)
     print("### turn start conv",our_chatbot.conversation)
     text = history[-1][0]
@@ -350,10 +351,10 @@ def bot(history):
             inputs=input_ids,
             streamer=streamer,
             images=image_tensor,
-            max_new_tokens=1024,
             do_sample=True,
-            temperature=0.2,
-            num_beams=1,
+            temperature=temperature,
+            top_p=top_p,
+            max_new_tokens=max_output_tokens,
             use_cache=False,
             stopping_criteria=[stopping_criteria],
         )
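Context for the hunk above: the hard-coded max_new_tokens=1024, temperature=0.2 and num_beams=1 are replaced by the values coming from the new UI controls. Below is a minimal sketch of how such a streamed generate() call typically looks, assuming a TextIteratorStreamer-based pattern and a LLaVA-style model whose generate() accepts an images tensor; model, tokenizer, input_ids and image_tensor are stand-ins for objects built elsewhere in app.py, and the real call also passes stopping_criteria.

from threading import Thread
from transformers import TextIteratorStreamer

def stream_reply(model, tokenizer, input_ids, image_tensor,
                 temperature, top_p, max_output_tokens):
    # generate() runs in a worker thread; the streamer yields decoded text
    # pieces as they are produced, so the UI can update incrementally.
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        inputs=input_ids,
        images=image_tensor,               # multimodal input (LLaVA-style generate)
        streamer=streamer,
        do_sample=True,
        temperature=temperature,           # from the Temperature slider
        top_p=top_p,                       # from the Top P slider
        max_new_tokens=max_output_tokens,  # from the Max output tokens slider
        use_cache=False,
    )
    Thread(target=model.generate, kwargs=generate_kwargs).start()
    text = ""
    for chunk in streamer:
        text += chunk
        yield text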
@@ -385,7 +386,7 @@ def bot(history):
         for upload_img in all_image_path:
             api.upload_file(
                 path_or_fileobj=upload_img,
-                path_in_repo=upload_img.replace("./logs/"),
+                path_in_repo=upload_img.replace("./logs/", ""),
                 repo_id=repo_name,
                 repo_type="dataset",
                 # revision=revision,
@@ -394,7 +395,7 @@ def bot(history):
         # upload json
         api.upload_file(
             path_or_fileobj=get_conv_log_filename(),
-            path_in_repo=get_conv_log_filename().replace("./logs/"),
+            path_in_repo=get_conv_log_filename().replace("./logs/", ""),
             repo_id=repo_name,
             repo_type="dataset")
 
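The two path_in_repo changes above fix a real bug: str.replace() requires both the substring to find and its replacement, so the one-argument form raised a TypeError before anything was uploaded. A minimal sketch of the corrected pattern, assuming the Space has already authenticated with a write token via login(); the log path shown is a made-up example, while repo_name comes from the diff.

from huggingface_hub import HfApi

api = HfApi()
repo_name = "paralym/pangea_logs"
log_path = "./logs/2024-10-20-conv.json"   # hypothetical local log file

# Strip the local "./logs/" prefix so the file lands at a repo-relative path.
api.upload_file(
    path_or_fileobj=log_path,
    path_in_repo=log_path.replace("./logs/", ""),
    repo_id=repo_name,
    repo_type="dataset",
)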
@@ -416,6 +417,31 @@ with gr.Blocks(
     gr.HTML(html_header)
 
     with gr.Column():
+        with gr.Accordion("Parameters", open=False) as parameter_row:
+            temperature = gr.Slider(
+                minimum=0.0,
+                maximum=1.0,
+                value=0.7,
+                step=0.1,
+                interactive=True,
+                label="Temperature",
+            )
+            top_p = gr.Slider(
+                minimum=0.0,
+                maximum=1.0,
+                value=1,
+                step=0.1,
+                interactive=True,
+                label="Top P",
+            )
+            max_output_tokens = gr.Slider(
+                minimum=0,
+                maximum=8192,
+                value=4096,
+                step=256,
+                interactive=True,
+                label="Max output tokens",
+            )
         with gr.Row():
             chatbot = gr.Chatbot([], elem_id="Pangea", bubble_full_width=False, height=750)
 
@@ -608,7 +634,7 @@ with gr.Blocks(
     chat_msg = chat_input.submit(
         add_message, [chatbot, chat_input], [chatbot, chat_input]
     )
-    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
+    bot_msg = chat_msg.then(bot, [chatbot,temperature, top_p, max_output_tokens], chatbot, api_name="bot_response")
     bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
 
     # chatbot.like(print_like_dislike, None, None)
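The last two hunks add the Parameters accordion and pass its three sliders into the bot handler as extra inputs. A stripped-down, self-contained sketch of that wiring follows; it is not the Space's actual layout, and a dummy echo bot stands in for the real model call.

import gradio as gr

def bot(history, temperature, top_p, max_output_tokens):
    # The slider values arrive as plain numbers, in the same order as the
    # inputs list given to .then() below.
    reply = f"(t={temperature}, p={top_p}, max={max_output_tokens}) echo: {history[-1][0]}"
    history[-1][1] = reply
    return history

def add_message(history, message):
    # MultimodalTextbox values are dicts with "text" and "files" keys.
    history = history + [[message["text"], None]]
    return history, gr.MultimodalTextbox(value=None, interactive=False)

with gr.Blocks() as demo:
    with gr.Accordion("Parameters", open=False):
        temperature = gr.Slider(0.0, 1.0, value=0.7, step=0.1, label="Temperature")
        top_p = gr.Slider(0.0, 1.0, value=1.0, step=0.1, label="Top P")
        max_output_tokens = gr.Slider(0, 8192, value=4096, step=256, label="Max output tokens")
    chatbot = gr.Chatbot([], height=750)
    chat_input = gr.MultimodalTextbox(interactive=True)

    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, [chatbot, temperature, top_p, max_output_tokens], chatbot)
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])

demo.launch()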
 