from PyPDF2 import PdfReader
import gradio as gr
import openai
import os

# Optional 'config' file: the first line holds the OpenAI API key, the second line
# an alternate API base URL (useful when the OpenAI server sits behind a proxy).
# e.g. first line "sk-xxxxxxxxxx", second line "http://PROXY-URL"
if os.path.isfile('config'):
    with open("config") as f:
        config = f.readlines()
else:
    config = []
api_key_from_config = ""
if len(config) > 0 and len(config[0].strip()) > 0:
    api_key_from_config = config[0].strip()
if len(config) > 1 and len(config[1].strip()) > 0:
    openai.api_base = config[1].strip()

# Print requests and responses to stdout for debugging
DEBUG = True

def gradio_messages_to_openai_messages(g):
    '''
    Convert Gradio chat history into OpenAI chat messages.
    gradio: [['first question', 'No'], ['second question', 'Yes']]
    openai: [{"role": "user", "content": "first question"}, {"role": "assistant", "content": "No"},
             {"role": "user", "content": "second question"}, {"role": "assistant", "content": "Yes"}]
    '''
    result = []
    for pair in g:
        result.append({"role": "user", "content": pair[0]})
        result.append({"role": "assistant", "content": pair[1]})
    return result

def respond(chat_history, message, system_message, key_txt, url_txt, model, temperature):
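    '''Send the system message, chat history, and new user message to the model
    and return the chat history extended with the model's reply.'''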
    messages = [
            {"role": "system", "content": system_message},
            *gradio_messages_to_openai_messages(chat_history),
            {"role": "user", "content": message}
    ] 
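    # Values entered in the Config tab take precedence over the config file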
    openai.api_key = key_txt if key_txt else api_key_from_config
    if url_txt:
        openai.api_base = url_txt
    if DEBUG:
        print("messages:", messages)
        print("model:", model)
        print("temperature:", temperature)
    completion = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=temperature,
    )
    if DEBUG:
        print("completion:", completion)
    response = completion['choices'][0]['message']['content']
    result = chat_history + [[message, response]]
    return result

def parse_pdf(prompt, pdfs, system_message, key_txt, url_txt, model, temperature):
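    '''Extract the text of every uploaded PDF, append it to the prompt,
    and return the model's response.'''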
    result = ""
    full_text = ""
    for pdf in pdfs:
        print("parse: ", pdf)
        text = ""
        reader = PdfReader(pdf.name)
        for page in reader.pages:
            text = text + page.extract_text()
        full_text = text + "\n----------\n"
    messages = [
            {"role": "system", "content": system_message},
            {"role": "user", "content": prompt + "\n\n###\n\n + full_text}
    ]
    openai.api_key = key_txt if key_txt else api_key_from_config
    if url_txt:
        openai.api_base = url_txt
    if DEBUG:
        print("messages:", messages)
        print("model:", model)
        print("temperature:", temperature)
    completion = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=temperature,
    )
    if DEBUG:
        print("completion:", completion)
    response = completion['choices'][0]['message']['content']

    return response
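
# UI layout: three tabs: Config (API settings), Chat (multi-turn chat), and PDF (run a prompt over uploaded PDFs)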

with gr.Blocks() as demo:
    with gr.Tab("Config"):
        with gr.Row():
            key_txt = gr.Textbox(label="OpenAI Key", placeholder="Enter OpenAI key 'sk-xxxx'%s" %
                    ("; leave empty to use the value from the config file" if api_key_from_config else ""))
            url_txt = gr.Textbox(label="OpenAI API Base URL", placeholder="Enter OpenAI base URL 'https://xxx'; leave empty to use '%s'" % openai.api_base)
        system_message = gr.Textbox(label="System Message:", value="You are an assistant who gives brief and concise answers.")
        model = gr.Dropdown(label="Model", choices=["gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-4"], multiselect=False, value="gpt-3.5-turbo", type="value")
        temperature = gr.Slider(0, 2, value=1, label="Temperature", step=0.1, info="What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.")
    with gr.Tab("Chat"):
        gr.Markdown("## Chat with GPT")
        chatbot = gr.Chatbot()
        message = gr.Textbox(label="Message:", placeholder="Enter text and press Enter or click 'Send'")
        message.submit(
            respond,
            [chatbot, message, system_message, key_txt, url_txt, model, temperature],
            chatbot,
        )
        with gr.Row():
            clear = gr.Button("Clear")
            clear.click(lambda: None, None, chatbot)
            send = gr.Button("Send")
            send.click(
                respond,
                [chatbot, message, system_message, key_txt, url_txt, model, temperature],
                chatbot,
            )
    with gr.Tab("PDF"):
        gr.Markdown("## Parse PDF with GPT")
        prompt = gr.Text(label="Prompt")
        pdfs = gr.File(label="Upload PDF", file_count="multiple", file_types=[".pdf"])
        markdown = gr.Markdown(label="Output")
        with gr.Row():
            clear = gr.Button("Clear")
            clear.click(lambda: None, None, markdown)
            submit = gr.Button("Upload")
            submit.click(
                    parse_pdf,
                    [prompt, pdfs, system_message, key_txt, url_txt, model, temperature],
                    markdown
            )


if __name__ == "__main__":
    # run directly with "python app.py"
    demo.launch()
else:
    # run with "gradio app.py" (hot reload); listen on all network interfaces
    demo.launch(server_name="0.0.0.0")