File size: 3,961 Bytes
51187cc
 
 
 
 
 
fa20ff3
51187cc
 
 
 
 
fa20ff3
 
 
 
 
 
 
 
 
 
 
51187cc
 
 
 
 
 
fa20ff3
 
 
 
 
 
 
 
51187cc
 
 
 
 
fa20ff3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51187cc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
fa20ff3
51187cc
fa20ff3
51187cc
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import gradio as gr
import os
import time
from utils import *

# Path/handle of the persistent vector DB; "" is the sentinel meaning "no PDFs
# uploaded yet" (checked in bot() to decide between plain chat and RAG).
vectordb = ""
# Module-level backup of the chat history, used as a fallback whenever Gradio
# hands the callbacks history=None; seeded with the same greeting shown in the
# Chatbot component below.
histr = [[None, "Hi, I'm **everything-rag**πŸ€–.\nI'm here to assist you and let you chat with _your_ pdfs!\nCheck [my website](https://astrabert.github.io/everything-rag/) for troubleshooting and documentation reference\nHave fun!😊"]]

def print_like_dislike(x: gr.LikeData):
    """Log a like/dislike feedback event from the chatbot to stdout."""
    feedback = (x.index, x.value, x.liked)
    print(*feedback)

def add_message(history, message):
    """Append the user's latest input (files and/or text) to the chat history.

    Args:
        history: current Chatbot history (list of [user, bot] pairs), or None
            when Gradio has no state yet.
        message: MultimodalTextbox payload, a dict with "text" and "files".

    Returns:
        (history, MultimodalTextbox): the updated history and a cleared,
        disabled input box (re-enabled by the .then() chain after the bot
        finishes replying).
    """
    global histr
    if history is None:
        # Fall back to the module-level backup directly. The previous
        # implementation recursed after aliasing history = histr, which made
        # histr receive BOTH the tuple and the list append (duplicates) and
        # discarded the recursive call's return value.
        history = histr
    if message["files"]:
        history.append((message["files"], None))
        if history is not histr:
            histr.append([message["files"], None])
    if message["text"]:
        history.append((message["text"], None))
        if history is not histr:
            histr.append([message["text"], None])
    return history, gr.MultimodalTextbox(value=None, interactive=False)


def bot(history):
    """Generate and stream the assistant's reply for the latest user turn.

    Three cases, keyed on the last user entry:
      * plain text, no vector DB yet -> answer with the bare HF text pipeline;
      * plain text, vector DB ready  -> RAG answer via just_chatting();
      * file upload (tuple entry)    -> merge the PDFs and build the DB.

    Yields:
        The full history repeatedly, one extra character per yield, so the UI
        shows a typing animation.
    """
    global vectordb
    global tsk
    global histr
    if history is None:
        # Use the backup history. The previous code called bot(history)
        # recursively here: calling a generator function only CREATES a
        # generator, and it was never iterated, so nothing was ever yielded.
        history = histr
    last_user = history[-1][0]
    if not isinstance(last_user, tuple):
        # Text turn.
        if vectordb == "":
            # No documents uploaded yet: plain text-generation pipeline.
            pipe = pipeline(tsk, tokenizer=tokenizer, model=model)
            response = pipe(last_user)[0]["generated_text"]
        else:
            try:
                response = just_chatting(
                    task=tsk,
                    model=model,
                    tokenizer=tokenizer,
                    query=last_user,
                    vectordb=vectordb,
                    chat_history=[convert_none_to_str(his) for his in history],
                )["answer"]
            except Exception as e:
                # Previously this message was built but never shown: the
                # except branch neither updated history nor yielded anything.
                response = f"Sorry, the error '{e}' occured while generating the response; check [troubleshooting documentation](https://astrabert.github.io/everything-rag/#troubleshooting) for more"
        histr[-1][1] = response
        yield from _stream_reply(history, response)
    else:
        # File upload: merge the PDFs into one and build a persistent
        # vector DB next to it.
        filelist = list(last_user)
        finalpdf = merge_pdfs(filelist)
        vectordb = create_a_persistent_db(
            finalpdf,
            os.path.dirname(finalpdf) + "_localDB",
            os.path.dirname(finalpdf) + "_embcache",
        )
        response = "VectorDB was successfully created, now you can ask me anything about the document you uploaded!😊"
        histr[-1][1] = response
        yield from _stream_reply(history, response)


def _stream_reply(history, response):
    # Type out `response` one character at a time, yielding the updated
    # history after each character (0.05 s per char) for a typing effect.
    history[-1][1] = ""
    for character in response:
        history[-1][1] += character
        time.sleep(0.05)
        yield history

# --- Gradio UI wiring -------------------------------------------------------
# NOTE: component creation order inside the Blocks context defines the layout,
# and the .submit()/.then() chain order defines event sequencing — do not
# reorder these statements.
with gr.Blocks() as demo:
    # Chat display, seeded with the same greeting stored in `histr`.
    chatbot = gr.Chatbot(
        [[None, "Hi, I'm **everything-rag**πŸ€–.\nI'm here to assist you and let you chat with _your_ pdfs!\nCheck [my website](https://astrabert.github.io/everything-rag/) for troubleshooting and documentation reference\nHave fun!😊"]],
        label="everything-rag",
        elem_id="chatbot",
        bubble_full_width=False,
    )

    # Combined text + PDF-upload input box.
    chat_input = gr.MultimodalTextbox(interactive=True, file_types=["pdf"], placeholder="Enter message or upload file...", show_label=False)

    # On submit: add the user message (and disable the box), then stream the
    # bot reply, then re-enable the input box.
    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])

    # Log like/dislike feedback to stdout.
    chatbot.like(print_like_dislike, None, None)

# Enable request queuing so the streaming generator (bot) works correctly.
demo.queue()

if __name__ == "__main__":
    demo.launch()