# LLama3Rag/src/interface.py
import gradio as gr


# Gradio application setup
def create_demo():
    """Build the chatbot UI and return the components for callers to wire to handlers."""
    with gr.Blocks(title="RAG Chatbot Q&A", theme=gr.themes.Soft()) as demo:
        with gr.Column():
            # Top row: conversation history next to a preview of the current PDF page
            with gr.Row():
                chat_history = gr.Chatbot(value=[], elem_id='chatbot', height=680)
                show_img = gr.Image(label='Overview', height=680)

            # Retrieval settings
            with gr.Row():
                with gr.Column():
                    slider1 = gr.Slider(minimum=256, maximum=1024, value=256, label="Chunk Size")

            # Input row: question box, send button, and PDF upload (relative widths 3:1:1)
            with gr.Row():
                with gr.Column(scale=3):
                    text_input = gr.Textbox(
                        show_label=False,
                        placeholder="Type here to ask your PDF",
                        container=False)
                with gr.Column(scale=1):
                    submit_button = gr.Button('Send')
                with gr.Column(scale=1):
                    uploaded_pdf = gr.UploadButton("📁 Upload PDF", file_types=[".pdf"])

    return demo, chat_history, show_img, text_input, submit_button, uploaded_pdf, slider1
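
# Note: create_demo() only builds the layout; the event wiring is expected to live in the
# caller (e.g. the project's main app module). Below is a minimal, hypothetical sketch of
# that wiring. The handler names add_text(), generate_response(), and render_file() are
# illustrative assumptions, not functions defined in this file.
#
#     demo, chat_history, show_img, text_input, submit_button, uploaded_pdf, slider1 = create_demo()
#     with demo:  # re-enter the Blocks context to attach event listeners
#         uploaded_pdf.upload(render_file, inputs=[uploaded_pdf], outputs=[show_img])
#         submit_button.click(
#             add_text, inputs=[chat_history, text_input], outputs=[chat_history]
#         ).then(
#             generate_response,
#             inputs=[chat_history, text_input, uploaded_pdf, slider1],
#             outputs=[chat_history],
#         )
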
if __name__ == '__main__':
    demo, chatbot, show_img, text_input, submit_button, uploaded_pdf, slider1 = create_demo()
    demo.queue()
    demo.launch()