JJteam committed on
Commit
f3d9e6b
1 Parent(s): b83a2e1

adding example bar

Dockerfile CHANGED
@@ -2,7 +2,7 @@ FROM python:3.10.9
 
 WORKDIR /src
 
-COPY ./MM-REACT /src/
+COPY ./MM-REACT /src/MM-REACT
 
 COPY ./requirements.txt /src/requirements.txt
 
@@ -12,7 +12,7 @@ RUN pip install --no-cache-dir /src/langchain-0.0.94-py3-none-any.whl
 
 RUN pip install --no-cache-dir --upgrade -r /src/requirements.txt
 
-WORKDIR /src
+WORKDIR /src/MM-REACT
 
 
 CMD ["python", "app.py", "--port", "7860", "--openAIModel", "azureChatGPT", "--noIntermediateConv"]
MM-REACT/app.py CHANGED
@@ -211,7 +211,14 @@ def load_chain(history, log_state):
     get_logger().warning("langchain reloaded", extra=properties)
     history = []
     history.append(("Show me what you got!", "Hi Human, Please upload an image to get started!"))
-    return history, history, chain, log_state, gr.Textbox.update(visible=True), gr.Button.update(visible=True), gr.UploadButton.update(visible=True)
+
+    return history, history, chain, log_state, \
+        gr.Textbox.update(visible=True), \
+        gr.Button.update(visible=True), \
+        gr.UploadButton.update(visible=True), \
+        gr.Row.update(visible=True), \
+        gr.HTML.update(visible=True), \
+        gr.Button.update(variant="secondary")
 
 
 # executes input typed by human
@@ -390,49 +397,90 @@ def init_and_kick_off():
     # initalize chatWrapper
     chat = ChatWrapper()
 
-    with gr.Blocks() as block:
+    exampleTitle = """<h3>Examples to start conversation..</h3>"""
+
+    with gr.Blocks(css="#tryButton {width: 120px;}") as block:
         llm_state = gr.State()
         history_state = gr.State()
         chain_state = gr.State()
         log_state = gr.State()
 
-        reset_btn = gr.Button(value="!!!CLICK to wake up MM-REACT!!!", variant="secondary", elem_id="resetbtn").style(full_width=True)
+        reset_btn = gr.Button(value="!!!CLICK to wake up MM-REACT!!!", variant="primary", elem_id="resetbtn").style(full_width=True)
 
+        example_image_size = 110
+        button_variant = "primary"
         with gr.Row():
-            chatbot = gr.Chatbot(elem_id="chatbot").style(height=620)
+            with gr.Column(scale=1.0, min_width=100):
+                chatbot = gr.Chatbot(elem_id="chatbot", label="MM-REACT Bot").style(height=620)
+            with gr.Column(scale=0.20, min_width=200, visible=False) as exampleCol:
+                with gr.Row():
+                    grExampleTitle = gr.HTML(exampleTitle, visible=False)
+                with gr.Row():
+                    with gr.Column(scale=0.50, min_width=100):
+                        example3Image = gr.Image("images/receipt.png", interactive=False).style(height=example_image_size, width=example_image_size)
+                    with gr.Column(scale=0.50, min_width=100):
+                        example3ImageButton = gr.Button(elem_id="tryButton", value="Try it!", variant=button_variant).style(full_width=True)
+                        # dummy text field to hold the path
+                        example3ImagePath = gr.Text("images/receipt.png", interactive=False, visible=False)
+                with gr.Row():
+                    with gr.Column(scale=0.50, min_width=100):
+                        example1Image = gr.Image("images/money.png", interactive=False).style(height=example_image_size, width=example_image_size)
+                    with gr.Column(scale=0.50, min_width=100):
+                        example1ImageButton = gr.Button(elem_id="tryButton", value="Try it!", variant=button_variant).style(full_width=True)
+                        # dummy text field to hold the path
+                        example1ImagePath = gr.Text("images/money.png", interactive=False, visible=False)
+                with gr.Row():
+                    with gr.Column(scale=0.50, min_width=100):
+                        example2Image = gr.Image("images/bar_plot.png", interactive=False).style(height=example_image_size, width=example_image_size)
+                    with gr.Column(scale=0.50, min_width=100):
+                        example2ImageButton = gr.Button(elem_id="tryButton", value="Try it!", variant=button_variant).style(full_width=True)
+                        # dummy text field to hold the path
+                        example2ImagePath = gr.Text("images/bar_plot.png", interactive=False, visible=False)
+                with gr.Row():
+                    with gr.Column(scale=0.50, min_width=100):
+                        example4Image = gr.Image("images/sign.png", interactive=False).style(height=100, width=100)
+                    with gr.Column(scale=0.50, min_width=100):
+                        example4ImageButton = gr.Button(elem_id="tryButton", value="Try it!", variant=button_variant).style(full_width=True)
+                        # dummy text field to hold the path
+                        example4ImagePath = gr.Text("images/sign.png", interactive=False, visible=False)
+                with gr.Row():
+                    with gr.Column(scale=0.50, min_width=100):
+                        example5Image = gr.Image("images/face.jpg", interactive=False).style(height=example_image_size, width=example_image_size)
+                    with gr.Column(scale=0.50, min_width=100):
+                        example5ImageButton = gr.Button(elem_id="tryButton", value="Try it!", variant=button_variant).style(full_width=True)
+                        # dummy text field to hold the path
+                        example5ImagePath = gr.Text("images/face.jpg", interactive=False, visible=False)
+
+
 
         with gr.Row():
             with gr.Column(scale=0.75):
-                message = gr.Textbox(label="What's on your mind??",
-                                     placeholder="What's the answer to life, the universe, and everything?",
+                message = gr.Textbox(label="Upload a pic and ask!",
+                                     placeholder="Type your question about the uploaded image",
                                      lines=1, visible=False)
             with gr.Column(scale=0.15):
                 submit = gr.Button(value="Send", variant="secondary", visible=False).style(full_width=True)
             with gr.Column(scale=0.10, min_width=0):
-                btn = gr.UploadButton("📁", file_types=["image"], visible=False).style(full_width=True)
-
-        # with gr.Row():
-        #     with gr.Column():
-        #         example1Image = gr.Image("images/money.png", interactive=False).style(height=100, width=100)
-        #     with gr.Column():
-        #         example1ImagePath = gr.Text("images/money.png", interactive=False, visible=False)
-        #     with gr.Column():
-        #         example1ImageButton = gr.Button(value="Try it", variant="secondary").style(full_width=True)
-        #         example1ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example1ImagePath],
-        #                                   outputs=[history_state, chatbot])
+                btn = gr.UploadButton("🖼️", file_types=["image"], visible=False).style(full_width=True)
 
 
-        message.submit(chat, inputs=[message, history_state, chain_state, log_state],
-                       outputs=[chatbot, history_state, message])
+        message.submit(chat, inputs=[message, history_state, chain_state, log_state], outputs=[chatbot, history_state, message])
 
-        submit.click(chat, inputs=[message, history_state, chain_state, log_state],
-                     outputs=[chatbot, history_state, message])
+        submit.click(chat, inputs=[message, history_state, chain_state, log_state], outputs=[chatbot, history_state, message])
 
         btn.upload(add_image, inputs=[history_state, chain_state, btn, log_state], outputs=[history_state, chatbot])
 
         # load the chain
-        reset_btn.click(load_chain, inputs=[history_state, log_state], outputs=[chatbot, history_state, chain_state, log_state, message, submit, btn])
-
+        reset_btn.click(load_chain, inputs=[history_state, log_state], outputs=[chatbot, history_state, chain_state, log_state, message, submit, btn, exampleCol, grExampleTitle, reset_btn])
+
+        # setup listener click for the examples
+        example1ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example1ImagePath, log_state], outputs=[history_state, chatbot])
+        example2ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example2ImagePath, log_state], outputs=[history_state, chatbot])
+        example3ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example3ImagePath, log_state], outputs=[history_state, chatbot])
+        example4ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example4ImagePath, log_state], outputs=[history_state, chatbot])
+        example5ImageButton.click(add_image_with_path, inputs=[history_state, chain_state, example5ImagePath, log_state], outputs=[history_state, chatbot])
+
+
     # launch the app
     block.launch(server_name="0.0.0.0", server_port = ARGS.port)
 
@@ -440,17 +488,11 @@ if __name__ == '__main__':
     parser = argparse.ArgumentParser()
 
     parser.add_argument('--port', type=int, required=False, default=7860)
-    parser.add_argument('--openAIModel', type=str, required=False, default='openAIGPT35')
+    parser.add_argument('--openAIModel', type=str, required=False, default='azureChatGPT')
    parser.add_argument('--noIntermediateConv', default=False, action='store_true', help='if this flag is turned on no intermediate conversation should be shown')
 
     global ARGS
     ARGS = parser.parse_args()
 
     init_and_kick_off()
-
-
-    # python app.py --port 7860 --openAIModel 'openAIGPT35'
-    # python app.py --port 7860 --openAIModel 'azureTextDavinci003'
-    # python app.py --port 7861 --openAIModel 'azureChatGPT'
-    # python app.py --port 7860 --openAIModel 'azureChatGPT' --noIntermediateConv
-    # python app.py --port 7862 --openAIModel 'azureGPT35turbo' --noIntermediateConv
+
MM-REACT/images/bar_plot.png ADDED
MM-REACT/images/face.jpg ADDED
MM-REACT/images/money.png ADDED
MM-REACT/images/receipt.png ADDED
MM-REACT/images/sign.png ADDED
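
Note on the example-bar wiring added in this commit: each example image sits next to a hidden gr.Text that only carries the image path, and its "Try it!" button forwards that path to add_image_with_path. The following is a minimal, self-contained sketch of that pattern, assuming Gradio 3.x; the handler body and the images/receipt.png asset are placeholders for illustration, not the repository's actual implementation.

# Minimal sketch of the example-bar pattern, assuming Gradio 3.x.
# The handler below is a stand-in: the real add_image_with_path also drives
# the langchain conversation, which is omitted here.
import gradio as gr

def add_image_with_path(history, image_path):
    # Record the chosen example in the chat history (placeholder behavior).
    history = history or []
    history.append((f"Selected example image: {image_path}",
                    "Ask me something about this image!"))
    return history, history

with gr.Blocks() as demo:
    history_state = gr.State()
    chatbot = gr.Chatbot(label="MM-REACT Bot")
    with gr.Row():
        # Thumbnail shown to the user; assumes the file exists locally.
        gr.Image("images/receipt.png", interactive=False)
        # Hidden text field that only holds the path for the click handler.
        example_path = gr.Text("images/receipt.png", visible=False)
        try_btn = gr.Button("Try it!")
    # Clicking "Try it!" feeds the hidden path into the handler,
    # mirroring each exampleNImageButton.click(...) call in the diff above.
    try_btn.click(add_image_with_path,
                  inputs=[history_state, example_path],
                  outputs=[history_state, chatbot])

if __name__ == "__main__":
    demo.launch()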