from os import getenv as os_getenv
from time import sleep
from json import loads as json_loads

import gradio as gr
from openai import OpenAI

client = OpenAI()
assistant_id = os_getenv("OPENAI_ASSISTANT_ID")
vector_id = os_getenv("OPENAI_VECTOR_ID")

# Answer-length options shown in the UI
ANS_SHORT = "Resumida"
ANS_REGULAR = "Normal"
ANS_LONG = "Detallada"


def chat(user_message, history, state, long_or_short):
    if (state is None) or (not state['user']):
        gr.Warning("You need to authenticate first")
        yield  # nothing to answer until the user is authenticated
    else:
        thread = state['thread']
        if thread is None:
            # Create one thread per session, with the vector store attached for file_search
            thread = client.beta.threads.create(
                tool_resources={
                    "file_search": {
                        "vector_store_ids": [vector_id]
                    }
                },
            )
            state['thread'] = thread

        # Add the user's message to the thread
        client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=user_message,
        )

        # Build extra instructions for this run: the user's name (if known)
        # plus the requested answer length.
        details = ""
        if state["user"] != "anonymous":
            details += f"User's name is {state['user']}. "
        if long_or_short == ANS_SHORT:
            details += "Please write a complete but short answer."
        elif long_or_short == ANS_LONG:
            details += ("Please write a complete and detailed answer. "
                        "Longer is better than shorter. Use tables and lists if needed.")

        # Stream the assistant's reply as it is generated
        with client.beta.threads.runs.stream(
            thread_id=thread.id,
            assistant_id=assistant_id,
            additional_instructions=details,
        ) as stream:
            total = ''
            for delta in stream.text_deltas:
                total += delta
                yield total


def chat_nostream(user_message, history, state):
    if (state is None) or (not state['user']):
        gr.Warning("You need to authenticate first")
        yield
    else:
        thread = state['thread']
        if thread is None:
            thread = client.beta.threads.create(
                tool_resources={
                    "file_search": {
                        "vector_store_ids": [vector_id]
                    }
                }
            )
            state['thread'] = thread

        # Add the user's message to the thread
        client.beta.threads.messages.create(
            thread_id=thread.id,
            role="user",
            content=user_message,
        )

        # Run the Assistant and poll until the run reaches a terminal state
        run = client.beta.threads.runs.create(thread_id=thread.id, assistant_id=assistant_id)
        while True:
            run_status = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
            print(f"Run status: {run_status.status}")
            if run_status.status == 'completed':
                break
            if run_status.status in ('failed', 'cancelled', 'expired'):
                # Give up on failed runs instead of polling forever
                break
            sleep(5)

        messages = client.beta.threads.messages.list(thread_id=thread.id)
        response = messages.data[0].content[0].text.value
        yield response


def new_state():
    return gr.State({
        "user": None,
        "thread": None,
    })


def auth(token, state):
    tokens = os_getenv("APP_TOKENS", None)
    if tokens is None:
        state["user"] = "anonymous"
    else:
        tokens = json_loads(tokens)
        state["user"] = tokens.get(token, None)
    return "", state


# On page load, copy an access token passed in the URL hash into the hidden textbox
AUTH_JS = """function auth_js(token, state) {
    if (!!document.location.hash) {
        token = document.location.hash
        document.location.hash = ""
    }
    return [token, state]
}
"""

theme = gr.Theme.from_hub("freddyaboulton/dracula_revamped@0.3.9")
theme.set(
    color_accent_soft="#6272a4",
    button_primary_text_color="*button_secondary_text_color",
    button_primary_background_fill="*button_secondary_background_fill")

with gr.Blocks(
        title="Dr. Luis Chiozza - Medicina y Psicoanalisis",
        fill_height=True,
        theme=theme) as demo:
    state = new_state()
    gr.HTML("""

        <div>
            <h1>Dr. Luis Chiozza - Medicina y Psicoanalisis</h1>
            <p>Habla con la colección de Medicina y Psicoanalisis del Dr. Luis Chiozza</p>
        </div>
    """)

    with gr.Row(variant="compact"):
        gr.HTML("Largo de la respuesta")
        long_or_short = gr.Radio(
            choices=[ANS_SHORT, ANS_REGULAR, ANS_LONG],
            value=ANS_REGULAR,
            show_label=False,
            container=False,
            label="Largo de la respuesta",
            scale=3)

    gr.ChatInterface(
        chat,
        additional_inputs=[state, long_or_short],
        examples=[
            ["Qué diferencias hay entre el cuerpo y el Alma?"],
            ["Cuáles son las distintas funciones de los órganos del cuerpo y su relación con el alma?"],
        ],
    )

    # Hidden textbox that receives the access token from the URL hash (see AUTH_JS)
    token = gr.Textbox(visible=False)
    demo.load(auth, [token, state], [token, state], js=AUTH_JS)

demo.launch(
    height=700,
    allowed_paths=["."])