# Gradio chat application backed by a pre-configured OpenAI Assistant.
import gradio as gr
from openai import OpenAI
import time
import os
from dotenv import load_dotenv
import json
# Load environment variables from a local .env file, if present.
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")
if not api_key:
    # Fail fast at startup rather than on the first API call.
    raise ValueError("No se encontró la API key de OpenAI. Asegúrate de configurar la variable de entorno OPENAI_API_KEY.")

client = OpenAI(api_key=api_key)
print("Cliente OpenAI inicializado")

# ID of the pre-configured OpenAI Assistant that bot() runs against.
assistant_id = "asst_0hq3iRy6LX0YLZP0QVzg17fT"
def add_message(history, message):
    """Append the user's message to the chat history as a pending entry.

    Args:
        history: list of (user_message, bot_reply) tuples shown by the
            Chatbot component.
        message: raw text from the input textbox.

    Returns:
        Tuple of (updated history, cleared-but-enabled textbox). Blank or
        whitespace-only input leaves the history unchanged; the bot reply
        slot is set to None so bot() can fill it in later.
    """
    if message.strip():
        history.append((message, None))
    return history, gr.Textbox(value="", interactive=True)
def _set_reply(history, text):
    """Attach *text* as the bot side of the last chat entry.

    Error and assistant messages must go in the second slot of the
    (user, bot) tuple; appending them as a new (text, None) entry — as
    the original code did — renders them as if the USER had said them.
    """
    if history:
        history[-1] = (history[-1][0], text)
    else:
        history.append((None, text))
    return history


def bot(history):
    """Run the OpenAI Assistant on the latest user message and write the
    reply into the last history entry.

    Args:
        history: list of (user_message, bot_reply) tuples; the last entry
            is expected to have None in its reply slot (see add_message).

    Returns:
        The updated history list. On timeout, run failure, or any
        unexpected exception, a user-facing apology is placed in the
        reply slot instead of the assistant's answer.
    """
    print("Iniciando función bot")
    try:
        last_message = history[-1][0] if history else "Hola"
        print(f"Último mensaje: {last_message}")

        # NOTE(review): a brand-new thread is created per call, so the
        # assistant never sees prior conversation turns — confirm that
        # single-turn behavior is intentional.
        thread = client.beta.threads.create()
        print(f"Hilo creado: {thread.id}")
        client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=last_message,
        )
        print("Mensaje del usuario añadido al hilo")

        run = client.beta.threads.runs.create(
            thread_id=thread.id,
            assistant_id=assistant_id,
        )
        print(f"Ejecución iniciada: {run.id}")

        timeout = 120  # seconds; poll every 2s until a terminal status
        start_time = time.time()
        while run.status not in ("completed", "failed", "cancelled"):
            if time.time() - start_time > timeout:
                print("Tiempo de espera agotado")
                client.beta.threads.runs.cancel(thread_id=thread.id, run_id=run.id)
                return _set_reply(history, "Lo siento, la respuesta está tardando demasiado. Por favor, intenta reformular tu pregunta.")
            time.sleep(2)
            run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
            print(f"Estado de la ejecución: {run.status}")

            if run.status == "requires_action":
                print("La ejecución requiere una acción")
                tool_calls = run.required_action.submit_tool_outputs.tool_calls
                tool_outputs = []
                for call in tool_calls:
                    # NOTE(review): tool calls are not actually executed;
                    # the call itself is echoed back to the assistant as
                    # JSON so the run can proceed.
                    tool_outputs.append({
                        "tool_call_id": call.id,
                        "output": json.dumps({
                            "function_called": call.function.name,
                            "arguments": json.loads(call.function.arguments),
                        }),
                    })
                client.beta.threads.runs.submit_tool_outputs(
                    thread_id=thread.id,
                    run_id=run.id,
                    tool_outputs=tool_outputs,
                )
                continue

        if run.status != "completed":
            print(f"La ejecución terminó con estado: {run.status}")
            return _set_reply(history, "Lo siento, hubo un problema al procesar tu mensaje. Por favor, intenta de nuevo o reformula tu pregunta.")

        messages = client.beta.threads.messages.list(thread_id=thread.id)
        print("Mensajes recuperados del hilo")

        # Concatenate every text part of every assistant message in the
        # (freshly created) thread.
        bot_response = ""
        for message in messages.data:
            if message.role == "assistant":
                for content in message.content:
                    if content.type == 'text':
                        bot_response += content.text.value + "\n"

        if not bot_response:
            print("No se encontró respuesta del asistente")
            bot_response = "Lo siento, no pude generar una respuesta. Por favor, intenta reformular tu pregunta."

        print(f"Respuesta del bot: {bot_response}")
        return _set_reply(history, bot_response.strip())
    except Exception as e:
        print(f"Error en la función bot: {e}")
        return _set_reply(history, "Lo siento, ocurrió un error inesperado. Por favor, intenta de nuevo.")
# Build the Gradio UI: a chatbot display wired to a single text input.
with gr.Blocks(fill_height=True) as demo:
    chatbot = gr.Chatbot(
        elem_id="chatbot",
        bubble_full_width=False,
        scale=1,
    )
    chat_input = gr.Textbox(
        interactive=True,
        placeholder="Escribe tu mensaje aquí...",
        show_label=False,
    )
    # Event chain: record the user message, run the assistant, then
    # re-enable the textbox for the next turn.
    chat_msg = chat_input.submit(add_message, [chatbot, chat_input], [chatbot, chat_input])
    bot_msg = chat_msg.then(bot, chatbot, chatbot, api_name="bot_response")
    bot_msg.then(lambda: gr.Textbox(interactive=True), None, [chat_input])

print("Iniciando la aplicación Gradio")
demo.queue()
demo.launch()