|
|
|
import os

import openai

# The broad llama_index import is kept intact: the setup snippet exec'd below
# runs in this module's globals and may reference these names.
from llama_index.core import (
    SimpleDirectoryReader,
    VectorStoreIndex,
    ServiceContext,
    StorageContext,
    Response,
    Document,
    load_index_from_storage,
)
from llama_index.llms.openai import OpenAI

import gradio as gr

# Expose the OpenAI key both to the openai client and to any library that
# reads it from the environment.
openai.api_key = os.environ.get('openai_key')
os.environ["OPENAI_API_KEY"] = os.environ.get('openai_key')
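
# Note: os.environ values must be strings, so the assignment above raises a
# TypeError at startup if the 'openai_key' secret is missing. A variant that
# fails with a clearer error (a sketch, not used here):
#
#   openai.api_key = os.environ['openai_key']  # KeyError names the missing secret
#   os.environ["OPENAI_API_KEY"] = openai.api_key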

# The setup snippet injected via the 'storage_context' environment variable is
# executed here; it presumably prepares the persisted index files that are
# loaded below (kept out of the repository as a secret).
exec(os.environ.get('storage_context'))

# Load the persisted vector index back from the working directory.
storage_context = StorageContext.from_defaults(persist_dir="./")
index = load_index_from_storage(storage_context, index_id="vector_index")
print("Index loaded.")
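
# For reference, a minimal sketch of how an index like this could have been
# built and persisted in the first place. The ./docs path is an assumption
# for illustration; this is not part of the deployed app, so it stays
# commented out:
#
#   documents = SimpleDirectoryReader("./docs").load_data()
#   index = VectorStoreIndex.from_documents(documents)
#   index.set_index_id("vector_index")
#   index.storage_context.persist(persist_dir="./")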

# Configure the LLM and build a streaming query engine over the index.
llm = OpenAI(model="gpt-3.5-turbo", temperature=0.1)
query_engine = index.as_query_engine(llm=llm, streaming=True)

# Instruction prepended to every question so answers come back in Spanish.
prompt = """Responder en español: """


def responder(pregunta):
    # Ask the engine; with streaming=True the result exposes a token
    # generator rather than a finished string.
    respuesta = query_engine.query(prompt + pregunta)

    # Accumulate the chunks and yield the growing text so Gradio renders the
    # answer incrementally.
    partial_message = ""
    for chunk in respuesta.response_gen:
        partial_message += chunk
        yield partial_message
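
# Example of consuming the generator outside the UI (the question is
# hypothetical, for illustration only):
#
#   for parcial in responder("¿Qué es la farmacovigilancia?"):
#       print(parcial)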


with gr.Blocks(theme='sudeepshouche/minimalist') as demo:
    gr.Markdown("""
    # Pharma.IA
    Realiza preguntas
    """)

    with gr.Row():
        with gr.Column():
            pregunta = gr.Text(label="Pregunta", placeholder='Escribe tu pregunta aquí...')
            with gr.Row():
                btn_send = gr.Button(value="Preguntar", variant="primary")
                clear = gr.Button(value="Limpiar")
        with gr.Column():
            response = gr.Textbox(label="Respuesta")

    # Stream the model's answer into the output box; "Limpiar" clears the
    # question field.
    btn_send.click(responder, inputs=[pregunta], outputs=[response])
    clear.click(lambda: None, None, pregunta, queue=False)

# Queueing must be enabled for streaming (generator) outputs.
demo.queue()
demo.launch()
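
# When running locally rather than on Hugging Face Spaces, a temporary public
# URL can be requested with demo.launch(share=True).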