import gradio as gr
import os
import sys
import argparse

from huggingface_hub import hf_hub_download

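# Read the OpenAI API key from the environment; the app exits if none is provided.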
my_api_key = os.environ.get('my_api_key')

if not my_api_key or my_api_key == "empty":
    print("Please provide an API key!")
    sys.exit(1)

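# Download the shared helper modules (utils.py and chat_func.py) from a Hugging Face dataset repo, authenticated with HUB_TOKEN.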
token = os.environ['HUB_TOKEN']
loc1 = hf_hub_download(repo_id="JunchuanYu/files_sydney", filename="utils.py", repo_type="dataset", local_dir='.', token=token)
loc2 = hf_hub_download(repo_id="JunchuanYu/files_sydney", filename="chat_func.py", repo_type="dataset", local_dir='.', token=token)

# hf_hub_download returns the path to the downloaded file, so add its parent directory (not the file itself) to sys.path.
sys.path.append(os.path.dirname(loc1))
sys.path.append(os.path.dirname(loc2))

from utils import *
from chat_func import *
    
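# Override Gradio's default Chatbot postprocess with the custom version imported from utils.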
gr.Chatbot.postprocess = postprocess


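# Load the custom stylesheet applied to the Blocks layout.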
with open("css_new.css", "r", encoding="utf-8") as f:
    css = f.read()

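# Build the Gradio Blocks UI: shared session state, the chat panel, and the ChatGPT / Settings tabs.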
with gr.Blocks(css=css,theme='gradio/soft') as demo:
    history = gr.State([])
    token_count = gr.State([])
    promptTemplates = gr.State(load_template('myprompts.json', mode=2))
    user_api_key = gr.State(my_api_key)
    TRUECONSTANT = gr.State(True)
    FALSECONSTANT = gr.State(False)
    gr.Markdown(title)
    
    with gr.Row().style(equal_height=True):
        with gr.Column(scale=12):
    #         gr.Markdown("""
    # #####  OpenAI's compute shortage has made the Sydney site unstable and prone to dropping offline. Reportedly the Plus API was temporarily pulled from sale and is limited to 100 questions per 3 hours; this is expected to last for a while. Please be patient; there is currently no good workaround.
    # """)
            with gr.Accordion("Built by [45度科研人] (WeChat public account)", open=False):
                gr.Markdown(description)
        with gr.Column(scale=1):
            with gr.Box():
                toggle_dark = gr.Button(value="Toggle Dark").style(full_width=True)
   
    with gr.Row(scale=1).style(equal_height=True):
        with gr.Column(scale=5):
            with gr.Column():
                chatbot = gr.Chatbot()
                user_input = gr.Textbox(show_label=False, placeholder="Enter text and press submit", visible=True).style(container=False)
                submitBtn = gr.Button("Submit",variant="primary").style(container=False)
                emptyBtn = gr.Button("Restart Conversation",variant="secondary")
                status_display = gr.Markdown("")

        with gr.Column():
            with gr.Column(min_width=50):
                with gr.Tab(label="ChatGPT"):
                    with gr.Column():
                        with gr.Row():
                            keyTxt = gr.Textbox(show_label=False, placeholder="You can input your own OpenAI API key", value=hide_middle_chars(my_api_key), visible=True, type="password", label="API-Key")
                            systemPromptTxt = gr.Textbox(show_label=True, placeholder="Set a custom instruction for the chatbot: You are a helpful assistant.", label="Custom prompt", value=initial_prompt, lines=10)

                        with gr.Row():
                            templateSelectDropdown = gr.Dropdown(label="Load from template", choices=load_template('myprompts.json', mode=1),
                                multiselect=False, value=load_template('myprompts.json', mode=1)[0]).style(container=False)
                
                with gr.Tab(label="Settings"):
                    with gr.Column():
                        with gr.Row():
                            with gr.Column(scale=3):
                                saveFileName = gr.Textbox(show_label=True, placeholder="Output file name...", label="Save conversation history", value="")
                            with gr.Column(scale=1):
                                exportMarkdownBtn = gr.Button("Save")
                        with gr.Row():
                            with gr.Column(scale=1):
                                downloadFile = gr.File(interactive=False)
    gr.Markdown("""
    ###  <div align=center>You can follow the WeChat public account [45度科研人] and leave me a message!</div>
    <br />
    <br />
    <div style="display:flex; justify-content:center;">
        <img src="https://dunazo.oss-cn-beijing.aliyuncs.com/blog/wechat-simple.png" style="margin-right:25px;width:200px;height:200px;">
        <div style="width:25px;"></div>
        <img src="https://dunazo.oss-cn-beijing.aliyuncs.com/blog/shoukuanma222.png" style="margin-left:25px;width:170px;height:190px;">
    </div>
    """)
    
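    # Client-side JS handler: toggle the 'dark' class on <body> to switch between light and dark mode.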
    toggle_dark.click(None,_js="""
        () => {
            document.body.classList.toggle('dark');
            document.querySelector('gradio-app').style.backgroundColor = 'var(--color-background-primary)'
        }""",)
    
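    # Keep the stored API key in sync with the key textbox.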
    keyTxt.submit(submit_key, keyTxt, [user_api_key, status_display])
    keyTxt.change(submit_key, keyTxt, [user_api_key, status_display])
    # Chatbot
    user_input.submit(predict, [user_api_key, systemPromptTxt, history, user_input, chatbot, token_count], [chatbot, history, status_display, token_count], show_progress=True)
    user_input.submit(reset_textbox, [], [user_input])

    submitBtn.click(predict, [user_api_key, systemPromptTxt, history, user_input, chatbot, token_count], [chatbot, history, status_display, token_count], show_progress=True)
    submitBtn.click(reset_textbox, [], [user_input])

    emptyBtn.click(reset_state, outputs=[chatbot, history, token_count, status_display], show_progress=True)

    templateSelectDropdown.change(get_template_content, [promptTemplates, templateSelectDropdown, systemPromptTxt], [systemPromptTxt], show_progress=True)
    exportMarkdownBtn.click(export_markdown, [saveFileName, systemPromptTxt, history, chatbot], downloadFile, show_progress=True)
    downloadFile.change(load_chat_history, [downloadFile, systemPromptTxt, history, chatbot], [saveFileName, systemPromptTxt, history, chatbot])


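# Enable request queuing and launch the app.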
if __name__ == "__main__":
    demo.queue().launch(debug=False, show_api=False)