import gradio as gr
from ChatErector import conversation, initializer
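# Note: ChatErector is a project-local module. Judging from the event wiring
# below, the imported callables are expected to behave roughly like this
# (inferred from the inputs/outputs lists, not a documented API):
#   initializer(files, temperature, max_new_tokens, top_k, threshold) -> (qa_chain, status_text)
#   conversation(qa_chain, message, chat_history) -> (qa_chain, message, chat_history)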
def ui():
    css = '''
    .label {text-align: center; font-size: 1.5em !important}
    '''
    with gr.Blocks(theme=gr.themes.Default(primary_hue="red", secondary_hue="pink", neutral_hue="purple"), css=css) as ui:
        qa_chain = gr.State()
        gr.HTML("<center><h1>Ask your Manuscript</h1></center>")
        gr.HTML("""<center><h3>Simple chatbot demo with RAG</h3></center>""")
        gr.Markdown("""<h4>Upload your documents to initialize the conversation system. Preprocessing may take some time if there are many documents.</h4>""")
        with gr.Row():
            with gr.Column(scale=86):
                gr.Markdown("""<h3><b>Important: the demo only works with PDF files and must be initialized by creating the database.</b></h3>""")
                with gr.Row():
                    document = gr.Files(height=300, file_count="multiple", file_types=["pdf"], interactive=True,
                                        label="Upload PDF documents")
                with gr.Row():
                    gr.Examples([["./examples/Roe V Wade.pdf"],
                                 ["./examples/Trade act.pdf"],
                                 [["./examples/Roe V Wade.pdf", "./examples/Trade act.pdf"]]],
                                document,
                                label="Document examples")
                with gr.Row():
                    gr.Markdown("<h5><b>Advanced settings</b></h5>")
                with gr.Row():
                    with gr.Accordion("Setup", open=False):
                        with gr.Row():
                            slider_temperature = gr.Slider(minimum=0.01, maximum=1.0, value=0.2, step=0.1,
                                                           label="Temperature",
                                                           info="Controls randomness in token generation (values above 0.5 are not recommended)",
                                                           interactive=True)
                        with gr.Row():
                            slider_maxtokens = gr.Slider(minimum=128, maximum=9192, value=4096, step=128,
                                                         label="Max New Tokens",
                                                         info="Maximum number of tokens to generate",
                                                         interactive=True)
                        with gr.Row():
                            slider_topk = gr.Slider(minimum=1, maximum=10, value=3, step=1, label="top-k",
                                                    info="Number of highest-probability tokens the next token is sampled from",
                                                    interactive=True)
                        with gr.Row():
                            thold = gr.Slider(minimum=0.01, maximum=1.0, value=0.8, step=0.1, label="Threshold",
                                              info="Minimum relevance level for retrieved information (values above 0.8 are not recommended)",
                                              interactive=True)
                with gr.Row():
                    qachain_btn = gr.Button("Create database")
                with gr.Row():
                    llm_progress = gr.Textbox(value="Not initialized", label="Database creation status")
            with gr.Column(scale=200):
                chatbot = gr.Chatbot(height=505)
                with gr.Row():
                    msg = gr.Textbox(placeholder="Ask a question", container=True)
                with gr.Row():
                    gr.Examples([["What countries does the trade act impact?"],
                                 ["Do women in the state of Michigan have a right to an abortion if they were raped?"]],
                                msg,
                                label="Example questions. Be as specific as possible.")
                with gr.Row():
                    submit_btn = gr.Button("Submit")
                    clear_btn = gr.ClearButton([msg, chatbot], value="Clear")
        # Preprocessing events
        qachain_btn.click(initializer,
                          inputs=[document, slider_temperature, slider_maxtokens, slider_topk, thold],
                          outputs=[qa_chain, llm_progress],
                          queue=False)
        # Chatbot events
        msg.submit(conversation,
                   inputs=[qa_chain, msg, chatbot],
                   outputs=[qa_chain, msg, chatbot],
                   queue=False)
        submit_btn.click(conversation,
                         inputs=[qa_chain, msg, chatbot],
                         outputs=[qa_chain, msg, chatbot],
                         queue=False)
        # Explicitly reset the message box and chat history when "Clear" is pressed
        clear_btn.click(lambda: [None, None], outputs=[msg, chatbot], queue=False)
    ui.queue().launch(debug=True)
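
# Assumed entry point (a common pattern for Gradio Spaces); the original file
# may start the app differently.
if __name__ == "__main__":
    ui()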