# Hugging Face Space: Gradio chat app backed by an OpenAI Assistant.
import json
import os
import sys
import time

import gradio as gr
import plotly.express as px
from openai import OpenAI
print("Script iniciado")

# SECURITY: the OpenAI API key used to be hard-coded here. A key committed to
# a public Space is compromised and must be revoked; read it from the
# environment (the standard OPENAI_API_KEY variable) instead.
try:
    client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
    print("Cliente OpenAI inicializado")
except Exception as e:
    # Without a working client nothing below can run, so exit early.
    print(f"Error al inicializar el cliente OpenAI: {e}")
    sys.exit(1)

# Pre-created Assistant that answers every chat turn.
assistant_id = "asst_0hq3iRy6LX0YLZP0QVzg17fT"
print(f"ID del asistente: {assistant_id}")
def random_plot():
    """Return a demo Plotly scatter figure of the iris dataset.

    Despite the name, the plot is deterministic (not random).

    Returns:
        plotly.graph_objects.Figure: sepal scatter colored by species.
    """
    # Fixed: the log message was mojibake ("gr谩fico") from a bad encoding round-trip.
    print("Generando gráfico aleatorio")
    df = px.data.iris()
    fig = px.scatter(df, x="sepal_width", y="sepal_length", color="species",
                     size='petal_length', hover_data=['petal_width'])
    return fig
def print_like_dislike(x: gr.LikeData):
    """Log the index, content and liked/disliked flag of a reacted message."""
    print(f"{x.index} {x.value} {x.liked}")
def add_message(history, message):
    """Append the user's turn to the chat history and clear the input box.

    Args:
        history: list of (user, bot) tuples shown in the Chatbot.
        message: dict from gr.MultimodalTextbox with "text" and "files" keys.

    Returns:
        (history, MultimodalTextbox): updated history and a cleared,
        still-interactive input component.
    """
    # Fixed: uploaded files were silently dropped even though the textbox is
    # configured with file_count="multiple". Each file becomes its own
    # ((path,), None) entry — the tuple form gr.Chatbot renders as a file.
    for file_path in message.get("files") or []:
        history.append(((file_path,), None))
    if message["text"] is not None and message["text"].strip() != "":
        history.append((message["text"], None))
    return history, gr.MultimodalTextbox(value=None, interactive=True)
def bot(history):
    """Run the latest user message through the OpenAI Assistants API.

    Creates a thread, posts the last user message, polls the run until it
    completes (2 s interval, 120 s timeout), auto-acknowledges any tool
    calls, and writes the assistant's reply into the last history entry.

    Args:
        history: list of (user, bot) tuples; the last entry holds the
            pending user message with a None bot slot.

    Returns:
        The updated history. On timeout/failure/error an apology entry is
        appended instead of a real answer.
    """
    print("Iniciando función bot")
    try:
        last_message = history[-1][0] if history else "Hola"
        print(f"Último mensaje: {last_message}")
        # NOTE(review): a brand-new thread per call means the assistant has
        # no memory of earlier turns — confirm this is intentional.
        thread = client.beta.threads.create()
        print(f"Hilo creado: {thread.id}")
        client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=last_message
        )
        print("Mensaje del usuario añadido al hilo")
        run = client.beta.threads.runs.create(
            thread_id=thread.id,
            assistant_id=assistant_id
        )
        print(f"Ejecución iniciada: {run.id}")
        timeout = 120  # seconds to wait before cancelling the run
        start_time = time.time()
        while run.status not in ["completed", "failed", "cancelled"]:
            if time.time() - start_time > timeout:
                print("Tiempo de espera agotado")
                client.beta.threads.runs.cancel(thread_id=thread.id, run_id=run.id)
                # Fixed: apologies were placed in the *user* slot of the
                # (user, bot) tuple; they belong in the bot slot.
                return history + [(None, "Lo siento, la respuesta está tardando demasiado. Por favor, intenta reformular tu pregunta.")]
            time.sleep(2)
            run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
            print(f"Estado de la ejecución: {run.status}")
            if run.status == "requires_action":
                print("La ejecución requiere una acción")
                required_actions = run.required_action.submit_tool_outputs.tool_calls
                tool_outputs = []
                for action in required_actions:
                    print(f"Acción requerida: {action.type}")
                    print(f"Función: {action.function.name}")
                    print(f"Argumentos: {action.function.arguments}")
                    # The tool is not actually executed; a success stub is
                    # returned so the run can continue.
                    tool_outputs.append({
                        "tool_call_id": action.id,
                        "output": json.dumps({"status": "success", "message": "Función ejecutada correctamente"})
                    })
                if tool_outputs:
                    run = client.beta.threads.runs.submit_tool_outputs(
                        thread_id=thread.id,
                        run_id=run.id,
                        tool_outputs=tool_outputs
                    )
                else:
                    client.beta.threads.runs.cancel(thread_id=thread.id, run_id=run.id)
                    return history + [(None, "Lo siento, el asistente requiere acciones adicionales que no puedo manejar en este momento. Por favor, intenta reformular tu pregunta.")]
        if run.status != "completed":
            print(f"La ejecución terminó con estado: {run.status}")
            return history + [(None, "Lo siento, hubo un problema al procesar tu mensaje. Por favor, intenta de nuevo o reformula tu pregunta.")]
        messages = client.beta.threads.messages.list(thread_id=thread.id)
        print("Mensajes recuperados del hilo")
        bot_response = ""
        for message in messages.data:
            if message.role == "assistant":
                for content in message.content:
                    if content.type == 'text':
                        bot_response += content.text.value + "\n"
        if not bot_response:
            print("No se encontró respuesta del asistente")
            bot_response = "Lo siento, no pude generar una respuesta. Por favor, intenta reformular tu pregunta."
        print(f"Respuesta del bot: {bot_response}")
        # Fixed: with an empty history the old code hit history[-1] and
        # raised IndexError; append the "Hola" fallback turn instead.
        if history:
            history[-1] = (history[-1][0], bot_response.strip())
        else:
            history.append((last_message, bot_response.strip()))
        return history
    except Exception as e:
        print(f"Error en la función bot: {e}")
        return history + [(None, "Lo siento, ocurrió un error inesperado. Por favor, intenta de nuevo.")]
print("Definiendo la interfaz Gradio")
fig = random_plot()  # NOTE(review): built but never placed in the layout — confirm whether it is needed

with gr.Blocks(fill_height=True) as demo:
    # Chat transcript display.
    chatbot = gr.Chatbot(
        elem_id="chatbot",
        bubble_full_width=False,
        scale=1,
    )
    # Input that accepts text plus multiple file uploads.
    chat_input = gr.MultimodalTextbox(interactive=True,
                                      file_count="multiple",
                                      placeholder="Enter message or upload file...", show_label=False)
    # Pipeline per submit: 1) record the user turn, 2) run the assistant,
    # 3) re-enable the input box.
    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
    bot_msg.then(lambda: gr.MultimodalTextbox(interactive=True), None, [chat_input])
    # Log like/dislike feedback to stdout.
    chatbot.like(print_like_dislike, None, None)

# Fixed: the launch log message was mojibake ("aplicaci贸n").
print("Iniciando la aplicación Gradio")
demo.queue()
demo.launch(share=True)
print("Script finalizado")