nxphi47 committed
Commit: be06e3c
1 Parent(s): 02ffef9

Update multipurpose_chatbot/demos/rag_chat_interface.py

multipurpose_chatbot/demos/rag_chat_interface.py CHANGED
@@ -73,6 +73,8 @@ from ..configs import (
     CHUNK_SIZE,
     CHUNK_OVERLAP,
     RAG_EMBED_MODEL_NAME,
+    CHATBOT_HEIGHT,
+    USE_PANEL,
 )
 
 RAG_CURRENT_VECTORSTORE = None
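
The two new names are imported from the package's ..configs module. A minimal sketch of what they plausibly resolve to, assuming the module exposes environment-driven constants (the env-var names and defaults below are illustrative, not taken from the repo):

import os

# Assumed definitions for the newly imported config constants; the actual
# values and environment-variable names in ..configs may differ.
CHATBOT_HEIGHT = int(os.environ.get("CHATBOT_HEIGHT", "500"))   # pixel height for gr.Chatbot
USE_PANEL = bool(int(os.environ.get("USE_PANEL", "1")))         # True -> "panel" layout, False -> "bubble"
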
@@ -289,12 +291,21 @@ class RagChatInterface(CustomizedChatInterface):
             if description:
                 Markdown(description)
 
-            if chatbot:
-                self.chatbot = chatbot.render()
-            else:
-                self.chatbot = Chatbot(
-                    label="Chatbot", scale=1, height=200 if fill_height else None
+            with Row():
+                self.rag_content = gr.Textbox(
+                    scale=1,
+                    lines=4,
+                    max_lines=16,
+                    label='Retrieved RAG context',
+                    placeholder="Rag context and instrution will show up here",
+                    interactive=False
                 )
+                if chatbot:
+                    self.chatbot = chatbot.render()
+                else:
+                    self.chatbot = Chatbot(
+                        label="Chatbot", scale=3, height=200 if fill_height else None
+                    )
 
             with Row():
                 for btn in [retry_btn, undo_btn, clear_btn]:
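
For context, a standalone sketch of the layout this hunk introduces: the retrieved-context textbox and the chatbot now share one Row, split roughly 1:3 by their scale values. This is a simplified reconstruction, not the class's actual rendering code:

import gradio as gr

with gr.Blocks() as demo:
    with gr.Row():
        # Narrow column: read-only box that displays the retrieved RAG context.
        rag_content = gr.Textbox(
            scale=1,
            lines=4,
            max_lines=16,
            label="Retrieved RAG context",
            interactive=False,
        )
        # Wide column: the chat history, three times the width of the context box.
        chatbot = gr.Chatbot(label="Chatbot", scale=3, height=200)

if __name__ == "__main__":
    demo.launch()
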
@@ -402,13 +413,13 @@ class RagChatInterface(CustomizedChatInterface):
                 if not input_component.is_rendered:
                     input_component.render()
 
-            self.rag_content = gr.Textbox(
-                scale=4,
-                lines=16,
-                label='Retrieved RAG context',
-                placeholder="Rag context and instrution will show up here",
-                interactive=False
-            )
+            # self.rag_content = gr.Textbox(
+            #     scale=4,
+            #     lines=16,
+            #     label='Retrieved RAG context',
+            #     placeholder="Rag context and instrution will show up here",
+            #     interactive=False
+            # )
 
             # The example caching must happen after the input components have rendered
             if cache_examples:
@@ -591,7 +602,11 @@ class RagChatInterfaceDemo(ChatInterfaceDemo):
 
         from ..configs import RAG_EMBED_MODEL_NAME
 
-        description = description or f"""Upload a long document to ask question about it with RAG. Embedding model {RAG_EMBED_MODEL_NAME}"""
+        description = (
+            description or
+            f"""Upload a long document to ask question with RAG. Check the RAG retrieved text segment on the left.
+Control `RAG instruction` below to fit your language. Embedding model {RAG_EMBED_MODEL_NAME}."""
+        )
 
         additional_inputs = [
             gr.File(label='Upload Document', file_count='single', file_types=['pdf', 'docx', 'txt']),
@@ -620,6 +635,9 @@ class RagChatInterfaceDemo(ChatInterfaceDemo):
                     { "left": "$$", "right": "$$", "display": True},
                 ],
                 show_copy_button=True,
+                scale=3,
+                layout="panel" if USE_PANEL else "bubble",
+                height=CHATBOT_HEIGHT,
             ),
             textbox=gr.Textbox(placeholder='Type message', lines=1, max_lines=128, min_width=200, scale=8),
             submit_btn=gr.Button(value='Submit', variant="primary", scale=0),
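
A small sketch of how the three added keyword arguments shape the demo's chatbot component, assuming CHATBOT_HEIGHT and USE_PANEL as imported in the first hunk (the literal values here are placeholders):

import gradio as gr

CHATBOT_HEIGHT = 500   # placeholder; the demo takes this from ..configs
USE_PANEL = True       # placeholder; the demo takes this from ..configs

chatbot = gr.Chatbot(
    show_copy_button=True,
    scale=3,                                     # pairs with the scale=1 context textbox in the same Row
    layout="panel" if USE_PANEL else "bubble",   # flat panel vs. chat bubbles
    height=CHATBOT_HEIGHT,                       # fixed pixel height instead of auto-sizing
)
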
 