import os
import textwrap
import datetime
from typing import List

import openai
import gradio as gr
from gradio import components

# Several of the names below are unused in this file; they are presumably
# kept so the setup code executed from the 'storage_context' environment
# variable (see below) has them in scope.
from llama_index.core import (
    SimpleDirectoryReader,
    VectorStoreIndex,
    ServiceContext,
    StorageContext,
    Response,
    Document,
    Settings,
    QueryBundle,
    get_response_synthesizer,
    load_index_from_storage,
)
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.core.node_parser import SentenceSplitter
from llama_index.core.tools import RetrieverTool
from llama_index.core.selectors import (
    LLMSingleSelector,
    LLMMultiSelector,
    PydanticMultiSelector,
    PydanticSingleSelector,
)
from llama_index.core.retrievers import (
    BaseRetriever,
    VectorIndexRetriever,
    KGTableRetriever,
    RouterRetriever,
)
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.prompts import PromptTemplate
from llama_index.core.schema import NodeWithScore
from llama_index.core.postprocessor import SentenceTransformerRerank
|
# Read the key once; assigning None into os.environ would raise a TypeError
# if the variable were missing.
openai_key = os.environ.get("openai_key", "")
openai.api_key = openai_key
os.environ["OPENAI_API_KEY"] = openai_key

# The index/query-engine setup code is injected at deploy time through the
# 'storage_context' environment variable (e.g. a deployment secret).
# Executing it is expected to define the `query_engine` used below.
exec(os.environ.get('storage_context'))
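
# A minimal sketch of what the injected setup code presumably does; the names
# and parameters here are assumptions, not the actual secret. responder()
# below iterates over `respuesta.response_gen`, which only exists on
# streaming responses, so the engine is presumably built with streaming=True:
#
#     Settings.llm = OpenAI(model="gpt-3.5-turbo", temperature=0)
#     Settings.embed_model = OpenAIEmbedding()
#     storage_context = StorageContext.from_defaults(persist_dir="./storage")
#     index = load_index_from_storage(storage_context)
#     query_engine = index.as_query_engine(streaming=True)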
|
prompt = """Eres el asistente virtual de la empresa Pharma.IA, responde las consultas como un experto en fabricacion de medicamentos, validacion de software e inteligencia artificial. Responder en español. |
|
Cuando no tengas la respuesta indica al usuario que consulte a info@pharma-ia.com.ar. Consulta: |
|
""" |
|
|
|
|
|
|
|
|
|
import gradio as gr |
|
from gradio import components |
|
import textwrap |
|
|
|
|
|
def responder(pregunta):
    """Stream the assistant's answer to a question as it is generated."""
    try:
        respuesta = query_engine.query(prompt + pregunta)
        # response_gen is only available on streaming responses; accumulate
        # the chunks so Gradio can show the answer growing in place.
        partial_message = ""
        for chunk in respuesta.response_gen:
            partial_message += chunk
            yield partial_message
    except Exception:
        # "Please try asking a more focused question."
        yield "Por favor, intenta realizar una pregunta más acotada."
|
# Gradio UI: question box and buttons on the left, streamed answer on the
# right. Labels are in Spanish ("Consultas" = queries, "Respuesta" = answer,
# "Preguntar" = ask, "Limpiar" = clear).
with gr.Blocks(theme='sudeepshouche/minimalist') as demo:
    gr.Markdown("""
    # Asistente Pharma.IA
    Realiza tus consultas sobre nuestras actividades y servicios
    """)
    with gr.Row():
        with gr.Column():
            pregunta = gr.Text(label="Consultas", placeholder='Escribe tu pregunta aquí...')
            with gr.Row():
                btn_send = gr.Button(value="Preguntar", variant="primary")
                clear = gr.Button(value="Limpiar")
        with gr.Column():
            response = gr.Textbox(label="Respuesta")

    btn_send.click(responder, inputs=[pregunta], outputs=[response])
    # Note: this only clears the question box; the previous answer remains.
    clear.click(lambda: None, None, pregunta, queue=False)

# queue() is required for generator (streaming) outputs in Gradio.
demo.queue()
demo.launch()