|
|
|
|
|
import openai |
|
import html |
|
import os |
|
import socket |
|
|
|
import gradio as gr |
|
import random |
|
import time |
|
import logging |
|
import yaml |
|
|
|
# Topic-name -> system-prompt mapping used to seed each conversation.
# Explicit UTF-8: the file contains Chinese topic names, and the platform
# default encoding (e.g. cp1252 on some Windows setups) would corrupt them.
# safe_load (not yaml.load) so the YAML cannot execute arbitrary code.
with open("./BOT_INIT_PROMPTS.yml", "r", encoding="utf-8") as f:
    BOT_INIT_PROMPTS = yaml.safe_load(f)
logging.basicConfig(level=logging.INFO)
|
|
|
# Models offered in the UI dropdown; index 0 is the default selection
# (and the default `model` argument of chat_with_model).
MODELS = [
    'gpt-3.5-turbo',
]

# Read the API key from the environment; os.getenv returns None when unset,
# in which case API calls will fail at request time rather than here.
openai.api_key = os.getenv("OPENAI_API_KEY")
|
|
|
|
|
def parse_request(request: gr.Request):
    """Extract connection details from an incoming Gradio request.

    Args:
        request: The ``gr.Request`` object Gradio injects into event handlers.

    Returns:
        dict with keys:
            ``client_ip``: best-guess originating client IP — the left-most
                entry of ``X-Forwarded-For`` when present, else the direct
                peer address;
            ``local_ip``: the IP this server's own hostname resolves to;
            ``headers``: the raw request headers.
    """
    client_ip = request.client.host
    # Fix: the original called gethostbyname(gethostbyname("")), feeding an
    # already-resolved IP string back into the resolver. Resolve our own
    # hostname instead.
    local_ip = socket.gethostbyname(socket.gethostname())
    headers = request.headers
    if headers and 'x-forwarded-for' in headers:
        # X-Forwarded-For is a comma-separated proxy chain:
        # "client, proxy1, proxy2". Splitting on spaces (as the original did)
        # left a trailing comma glued to the IP; split on ',' and strip.
        x_forwarded_for = headers['x-forwarded-for']
        client_ip = x_forwarded_for.split(',')[0].strip() if x_forwarded_for else ""

    return {"client_ip": client_ip,
            "local_ip": local_ip,
            "headers": headers}
|
|
|
|
|
def reduce_history(history):
    """Shorten a conversation by removing the two entries just past its midpoint.

    Keeps the first half of the list plus everything from index midpoint+2
    onward, so the result is up to two elements shorter than the input.
    The input list itself is not modified.
    """
    midpoint = len(history) // 2
    trimmed = history[:midpoint] + history[midpoint + 2:]
    logging.info('history reduced')
    return trimmed
|
|
|
|
|
def chat_with_model(request, role, content, history=None, model=MODELS[0]):
    """Send one message to the OpenAI chat API and append the reply to history.

    Args:
        request: ``gr.Request``, used only to log the client IP.
        role: 'user' or 'system'. A 'system' message resets the conversation.
        content: The message text to send.
        history: Running list of ``{'role', 'content'}`` message dicts. A new
            list is created when None. (The original signature used the
            mutable default ``history=[]``, which is shared across all calls
            that omit the argument — messages would leak between requests.)
        model: Model name passed to the API; defaults to the first entry of
            ``MODELS``.

    Returns:
        (content, history): the HTML-escaped assistant reply and the updated
        message list.
    """
    client_ip = parse_request(request)["client_ip"]
    if history is None or role == 'system':
        # System prompts start a fresh conversation.
        history = []
    history.append({'role': role, 'content': content})
    response = openai.ChatCompletion.create(
        model=model,
        messages=history
    )

    # Escape the reply so it renders as plain text (not HTML) in the chat UI.
    content = html.escape(response['choices'][0]['message']['content'])
    total_tokens = response['usage']['total_tokens']
    history.append({'role': 'assistant', 'content': content})
    logging.info('client={}, history count={}'.format(
        client_ip, str(len(history))))
    # Trim the conversation as we approach the 4096-token context window.
    if 4096 - total_tokens < 500:
        history = reduce_history(history)
    return content, history
|
|
|
|
|
def launch():
    """Build the Gradio chat UI and start serving it (blocking call)."""
    with gr.Blocks() as demo:
        # Per-session conversation history, in OpenAI message-dict format.
        history = gr.State([])
        with gr.Row():
            with gr.Column(scale=3):
                # Chat window seeded with a greeting ("pick a topic on the right").
                chatbot = gr.Chatbot([[None, "在右侧选择聊天主题开始对话吧~"]], label="对话框")
                msg = gr.Textbox(label="输入框", placeholder="输入文字,按回车键提交")
                submit = gr.Button("Submit", variant="primary")
                clear = gr.Button("Reload")
            with gr.Column(scale=1):
                model_selector = gr.Dropdown(
                    MODELS, value=MODELS[0], interactive=True, label="选择模型", info="选择一个模型"
                )
                # Topic picker; keys come from BOT_INIT_PROMPTS.yml.
                bot_selector = gr.Dropdown(
                    list(BOT_INIT_PROMPTS.keys()), value="自由对话", interactive=True, label="选择主题", info="选择一个聊天主题"
                )

        def user(user_message, chatbot):
            """Show the user's message in the chat window, with a pending bot slot."""
            return user_message, chatbot + [[user_message, None]]

        def bot_resp(msg, chatbot, history, model, request: gr.Request):
            """Send msg to the model and fill the pending bubble; clears the textbox."""
            bot_message, history = chat_with_model(
                request, 'user', msg, history, model)
            chatbot[-1][1] = bot_message
            return '', chatbot, history

        def bot_change(selected_bot, history, model, request: gr.Request):
            """Restart the conversation with the selected topic's system prompt."""
            client_ip = parse_request(request)["client_ip"]
            logging.info(
                f'client={client_ip} Bot changed={selected_bot}')
            if BOT_INIT_PROMPTS.get(selected_bot):
                # chat_with_model resets history when role is 'system'.
                content, history = chat_with_model(request,
                                                   'system', BOT_INIT_PROMPTS[selected_bot], history, model)
                return [[None, content]], '', history
            else:
                # Unknown/empty topic: clear both the window and the history.
                return [], '', []

        # Enter key and the Submit button behave identically: echo the user
        # message first (queue=False for immediacy), then fetch the bot reply.
        msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
            bot_resp, [msg, chatbot, history, model_selector], [
                msg, chatbot, history]
        )
        submit.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
            bot_resp, [msg, chatbot, history, model_selector], [
                msg, chatbot, history]
        )
        # Reload: wipe the chat window, then re-send the current topic prompt.
        clear.click(lambda: None, None, chatbot, queue=False).then(
            bot_change, [bot_selector, history, model_selector], [chatbot, msg, history])
        # Switching topics clears the UI and restarts the conversation.
        bot_selector.change(lambda: [None, None], None, [msg, chatbot]).then(
            bot_change, [bot_selector, history, model_selector], [chatbot, msg, history])

    demo.launch()
|
|
|
|
|
# Start the Gradio app only when run as a script, not when imported.
if __name__ == '__main__':
    launch()
|
|