import gradio as gr

# Gradio application setup
def create_demo():
    """Build the Gradio Blocks UI and return the demo plus its interactive components."""
    with gr.Blocks(title="LLAMA 3 Rag on Fly", theme="Monochrome") as demo:
        # App Description
        gr.Markdown(
            """
            ## LLAMA 3 Rag on Fly App

            This application lets you experiment with the Llama-3-8B-Instruct-Gradient-1048k model for RAG.
            You can adjust various parameters to control the model's output.

            Original space: https://huggingface.co/spaces/ModularityAI/LLama3Rag

            Compared to the original, this version adds a save-conversation button (in case you want to keep your chat) and switches the AI model to the GradientAI version of Llama 3 8B Instruct.
            """
        )

        with gr.Row():
            # Chatbot and image column (takes most of the row width)
            with gr.Column(scale=0.95):
                with gr.Row():
                    chat_history = gr.Chatbot(value=[], elem_id='chatbot', height=480)
                    show_img = gr.Image(label='Uploaded PDF', height=480)

            # Sliders column (narrow settings panel)
            with gr.Column(scale=0.05):
                with gr.Row():
                    slider_chunk_size = gr.Slider(
                        minimum=256, maximum=32000, value=256, label="Chunk Size", elem_id='slider1'
                    )
                with gr.Row():
                    slider_overlap_percentage = gr.Slider(
                        minimum=0, maximum=99, value=50, label="Chunk Overlap Percentage", elem_id='slider2'
                    )
                with gr.Row():
                    slider_temp = gr.Slider(
                        minimum=0, maximum=1, value=0.5, label="Model Temperature", elem_id='slider3'
                    )
                with gr.Row():
                    slider_k = gr.Slider(
                        minimum=1, step=1, maximum=20, value=2, label="Max Chunks in Context", elem_id='slider4'
                    )

        # Input and Submit Button Row
        with gr.Row():
            with gr.Column(scale=0.60):
                text_input = gr.Textbox(
                    show_label=False,
                    placeholder="Type here to ask your PDF",
                    container=False
                )
            with gr.Column(scale=0.20):
                submit_button = gr.Button('Send')
            with gr.Column(scale=0.20):
                uploaded_pdf = gr.UploadButton("📁 Upload PDF", file_types=[".pdf"], elem_id='upload_pdf')

    return demo, chat_history, show_img, text_input, submit_button, uploaded_pdf, slider_chunk_size, slider_overlap_percentage, slider_temp, slider_k

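# --- Hedged sketch (not part of the original Space) -------------------------------
# create_demo() only builds the layout; the event wiring to the actual RAG backend
# lives elsewhere in the Space. The helpers below are a minimal, hypothetical example
# of how the returned components could be connected: `_placeholder_answer` and
# `wire_demo` are illustrative names, not the Space's real handlers.
def _placeholder_answer(message, history):
    # Placeholder handler: a real implementation would embed the question, retrieve
    # the top-k chunks from the indexed PDF, and call the Llama 3 model.
    history = (history or []) + [(message, "(model response would appear here)")]
    return history, ""

def wire_demo(demo, chat_history, text_input, submit_button):
    # Event listeners must be registered inside the Blocks context, so re-enter it.
    with demo:
        submit_button.click(
            fn=_placeholder_answer,
            inputs=[text_input, chat_history],
            outputs=[chat_history, text_input],
        )
    return demo
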
if __name__ == '__main__':
    demo, chatbot, show_img, text_input, submit_button, uploaded_pdf, slider_chunk_size, slider_overlap_percentage, slider_temp, slider_k = create_demo()
    demo.queue()
    demo.launch()