import gradio as gr
import pandas as pd
import os

from openai import OpenAI

OPEN_AI_KEY = os.getenv("OPEN_AI_KEY")
client = OpenAI(api_key=OPEN_AI_KEY)
def process_file(file):
    # Read the uploaded file
    if file.name.endswith('.csv'):
        df = pd.read_csv(file.name)
    else:
        df = pd.read_excel(file.name)
    # Convert the DataFrame to a string
    df_string = df.to_string()
    # Return the DataFrame string so it can be used in the chatbot's system prompt
    return df_string
def respond(user_message, df_string_output, chat_history):
    print("=== variable: user_message ===")
    print(user_message)
    print("=== variable: chat_history ===")
    print(chat_history)

    sys_content = f"You are a data analyst. Use the following data to answer questions: {df_string_output}"
    messages = [
        {"role": "system", "content": sys_content},
        {"role": "user", "content": user_message}
    ]

    print("=====messages=====")
    print(messages)
    print("=====messages=====")

    request_payload = {
        "model": "gpt-4-1106-preview",
        "messages": messages,
        "max_tokens": 2000  # a generous limit; adjust as needed
    }

    response = client.chat.completions.create(**request_payload)
    print(response)
    response_text = response.choices[0].message.content.strip()

    # Update the chat history
    new_chat_history = (user_message, response_text)
    if chat_history is None:
        chat_history = [new_chat_history]
    else:
        chat_history.append(new_chat_history)

    # Return an empty string to clear the input box, plus the updated history
    return "", chat_history
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column():
            file_upload = gr.File(label="Upload your file")
        with gr.Column():
            df_string_output = gr.Textbox(label="")
    with gr.Row():
        chatbot = gr.Chatbot()
    with gr.Row():
        with gr.Column():
            with gr.Group():
                # password_input = gr.Textbox(label="Password", type="password")
                msg = gr.Textbox(label="Enter your message")
                send_button = gr.Button("Send")

    # When a file is uploaded, update the DataFrame string
    file_upload.change(process_file, inputs=file_upload, outputs=df_string_output)

    # Handle the chatbot conversation
    send_button.click(
        respond,
        inputs=[msg, df_string_output, chatbot],
        outputs=[msg, chatbot]
    )

demo.launch()
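
For reference, the data flow above amounts to rendering the uploaded table as plain text and splicing it into the system prompt. The snippet below is a minimal, Gradio-free sketch of that flow; the sample DataFrame and the user question are hypothetical stand-ins for an uploaded file and a chat message.

import pandas as pd

# Hypothetical sample data standing in for an uploaded CSV/Excel file.
df = pd.DataFrame({"city": ["Taipei", "Kaohsiung"], "sales": [120, 95]})
df_string = df.to_string()  # same rendering that process_file() returns

# The system prompt the model receives is just this text wrapped in an
# instruction, mirroring sys_content in respond() above.
sys_content = f"You are a data analyst. Use the following data to answer questions: {df_string}"
messages = [
    {"role": "system", "content": sys_content},
    {"role": "user", "content": "Which city has higher sales?"},
]
print(messages)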