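# Minimal chat web UI built with Gradio on top of gpt4free (g4f).
# The user picks a model and a provider; replies are streamed into the chatbot.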
import os

import g4f
import gradio as gr
from g4f.Provider import (
    Ails,
    You,
    Bing,
    Yqcloud,
    Theb,
    Aichat,
    Bard,
    Vercel,
    Forefront,
    Lockchat,
    Liaobots,
    H2o,
    ChatgptLogin,
    DeepAi,
    GetGpt
)

provider_dict = {
    # 'Ails': Ails,
    # 'You': You,
    # 'Bing': Bing,
    # 'Yqcloud': Yqcloud,
    # 'Theb': Theb,
    # 'Aichat': Aichat,
    # 'Bard': Bard,
    # 'Vercel': Vercel,
    # 'Forefront': Forefront,
    # 'Lockchat': Lockchat,
    # 'Liaobots': Liaobots,
    # 'H2o': H2o,
    # 'ChatgptLogin': ChatgptLogin,
    'DeepAi': DeepAi,
    'GetGpt': GetGpt
}

with gr.Blocks() as demo:
    # The chat history is a list of [user_message, bot_message] pairs.
    chatbot = gr.Chatbot([[None, None]], label='AI')
    msg = gr.Textbox(value="", label='')
    clear = gr.Button("Clear")

    with gr.Row():
        model_name = gr.Dropdown(['gpt-3.5-turbo', 'gpt-4'], value='gpt-3.5-turbo', label='Model')
        provider_name = gr.Dropdown(list(provider_dict.keys()), value='GetGpt', label='Provider')

    def user(user_message, history):
        # Clear and lock the textbox, then append the new user turn to the history.
        return gr.update(value="", interactive=False), history + [[user_message, None]]

    def bot(history, model_name, provider_name):
        # Stream the completion and grow the last bot message chunk by chunk.
        history[-1][1] = ''
        bot_msg = g4f.ChatCompletion.create(
            model=model_name,
            provider=provider_dict[provider_name],
            messages=[{"role": "user", "content": history[-1][0]}],
            stream=True
        )
        for c in bot_msg:
            history[-1][1] += c
            yield history

    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, [chatbot, model_name, provider_name], chatbot
    )
    # Re-enable the textbox once the bot has finished streaming.
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)
    clear.click(lambda: None, None, chatbot, queue=False)

demo.title = "AI Chat"
demo.queue()
demo.launch()