doctorsafe committed
Commit 0b0f901 · 1 Parent(s): a285530

Update app.py

Files changed (1)
  1. app.py +23 -67
app.py CHANGED
@@ -1,70 +1,26 @@
- import gradio as gr
- from predict import predict
- from toolbox import format_io, find_free_port
-
- try: from config_private import proxies, WEB_PORT, LLM_MODEL
- except: from config import proxies, WEB_PORT, LLM_MODEL
-
- # Test some wildly experimental functional modules
- from functional_crazy import get_crazy_functionals, on_file_uploaded, on_report_generated
- crazy_functional = get_crazy_functionals()
-
- # Handle conversion of markdown text formatting
- gr.Chatbot.postprocess = format_io
-
- # Make some appearance and color adjustments
- from theme import adjust_theme
- set_theme = adjust_theme()
-
- with gr.Blocks(theme=set_theme, analytics_enabled=False) as demo:
-     gr.HTML(title_html)
-     with gr.Row():
-         with gr.Column(scale=2):
-             chatbot = gr.Chatbot()
-             chatbot.style(height=1000)
-             chatbot.style()
-             history = gr.State([])
-             TRUE = gr.State(True)
-             FALSE = gr.State(False)
-         with gr.Column(scale=1):
-             with gr.Row():
-                 with gr.Column(scale=12):
-                     txt = gr.Textbox(show_label=False, placeholder="Input question here.").style(container=False)
-                 with gr.Column(scale=1):
-                     submitBtn = gr.Button("提交", variant="primary")
-             with gr.Row():
-                 from check_proxy import check_proxy
-                 statusDisplay = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行. \nNetwork: {check_proxy(proxies)}\nModel: {LLM_MODEL}")
-             with gr.Row():
-                 for k in functional:
-                     variant = functional[k]["Color"] if "Color" in functional[k] else "secondary"
-                     functional[k]["Button"] = gr.Button(k, variant=variant)
-             with gr.Row():
-                 gr.Markdown("以下部分实验性功能需从input框读取路径.")
-             with gr.Row():
-                 for k in crazy_functional:
-                     variant = crazy_functional[k]["Color"] if "Color" in crazy_functional[k] else "secondary"
-                     crazy_functional[k]["Button"] = gr.Button(k, variant=variant)
-             with gr.Row():
-                 gr.Markdown("上传本地文件供上面的实验性功能调用.")
-             with gr.Row():
-                 file_upload = gr.Files(label='任何文件,但推荐上传压缩文件(zip, tar)', file_count="multiple")
-
-             systemPromptTxt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt).style(container=True)
-             # inputs, top_p, temperature, top_k, repetition_penalty
-             with gr.Accordion("arguments", open=False):
-                 top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01, interactive=True, label="Top-p (nucleus sampling)",)
-                 temperature = gr.Slider(minimum=-0, maximum=5.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
-
-     txt.submit(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay])
-     submitBtn.click(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay], show_progress=True)
-     for k in functional:
-         functional[k]["Button"].click(predict,
-             [txt, top_p, temperature, chatbot, history, systemPromptTxt, TRUE, gr.State(k)], [chatbot, history, statusDisplay], show_progress=True)
-     file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
-     for k in crazy_functional:
-         click_handle = crazy_functional[k]["Button"].click(crazy_functional[k]["Function"],
-             [txt, top_p, temperature, chatbot, history, systemPromptTxt, gr.State(PORT)], [chatbot, history, statusDisplay]
-         )
-         try: click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
-         except: pass
 
+ import openai
+ import gradio as gr
+ openai.api_key = "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"
+
+ def chatgpt(content, temperature=0.8):
+     response = openai.ChatCompletion.create(
+         model="gpt-3.5-turbo",
+         messages=[
+             {"role": "user", "content": content}
+         ],
+         temperature=temperature,
+         max_tokens=1000,
+         top_p=1,
+         frequency_penalty=0,
+         presence_penalty=0,
+     )
+     # print(response)
+     return response.choices[0].message.content
+
+ if __name__ == "__main__":
+     def send_chatgpt(text):
+         output = chatgpt(text, 0.8)
+         return output
+
+     interface = gr.Interface(fn=send_chatgpt, inputs="text", outputs="text")
+     interface.launch()
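
For quick local testing, a minimal sketch of exercising the new chatgpt helper outside the Gradio UI. Reading the key from the OPENAI_API_KEY environment variable (instead of the string hardcoded in the commit) and the sample prompt are assumptions; the request shape follows the pre-1.0 openai package that app.py uses.

import os
import openai

# Assumption: the key is supplied via the OPENAI_API_KEY environment variable,
# not the literal string committed in app.py.
openai.api_key = os.environ["OPENAI_API_KEY"]

def chatgpt(content, temperature=0.8):
    # Same request shape as the committed helper (openai<1.0 ChatCompletion API).
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": content}],
        temperature=temperature,
        max_tokens=1000,
    )
    return response.choices[0].message.content

if __name__ == "__main__":
    # Hypothetical smoke test before wiring the helper into gr.Interface.
    print(chatgpt("Say hello in one sentence."))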