|
|
|
import datetime
import os
import textwrap
from typing import List

import gradio as gr
import openai
from gradio import components
from llama_index.core import (
    Document,
    QueryBundle,
    Response,
    ServiceContext,
    Settings,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    get_response_synthesizer,
    load_index_from_storage,
)
from llama_index.core.llms import ChatMessage, MessageRole
from llama_index.core.node_parser import SentenceSplitter
from llama_index.core.postprocessor import SentenceTransformerRerank
from llama_index.core.prompts import PromptTemplate
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.retrievers import (
    BaseRetriever,
    KGTableRetriever,
    RouterRetriever,
    VectorIndexRetriever,
)
from llama_index.core.selectors import (
    LLMMultiSelector,
    LLMSingleSelector,
    PydanticMultiSelector,
    PydanticSingleSelector,
)
from llama_index.core.tools import RetrieverTool
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
|
|
|
openai.api_key = os.environ.get('openai_key') |
|
os.environ["OPENAI_API_KEY"] = os.environ.get('openai_key') |
|
|
|
|
|
|
|
exec(os.environ.get('storage_context')) |
|
|
|
|
|
|
|
|
|
|
|
|
|
system_prompt = """Eres el asistente virtual de la empresa Pharma.IA, responde las consultas como un experto en fabricacion de medicamentos, validacion de software e inteligencia artificial. Responder en español y en forma breve. |
|
Cuando no tengas la respuesta indica al usuario que consulte a info@pharma-ia.com.ar. Consulta: |
|
""" |
|
|
|
|
|
|
|
|
|
import gradio as gr |
|
from gradio import components |
|
import textwrap |
|
|
|
|
|
|
|
with gr.Blocks(theme='sudeepshouche/minimalist') as demo: |
|
chat_history = [] |
|
chat_history_engine = [] |
|
|
|
|
|
|
|
def refresh(chat_history): |
|
global kg_data |
|
global chat_history_engine |
|
kg_data = [] |
|
chat_history_engine = [] |
|
chat_history = [[None, None]] |
|
gr.Info("¡Listo! Ya puedes seguir chateando.") |
|
return chat_history |
|
|
|
|
|
def summarize_assistant_messages(chat_history: List[ChatMessage]) -> List[ChatMessage]: |
|
|
|
assistant_messages = [msg for msg in chat_history if msg.role == MessageRole.ASSISTANT] |
|
if len(assistant_messages) < 2: |
|
return chat_history |
|
|
|
anteultima_respuesta = assistant_messages[-2] |
|
|
|
|
|
prompt = Prompt(f"Responder SOLO con un resumen completo pero más corto del siguiente texto: \n\n{anteultima_respuesta.content}") |
|
llm = OpenAI(model="gpt-3.5-turbo", temperature=0.1) |
|
response = llm.predict(prompt) |
|
|
|
|
|
summarized_message = ChatMessage(content=response, role=MessageRole.ASSISTANT) |
|
|
|
|
|
new_chat_history = [msg if msg != anteultima_respuesta else summarized_message for msg in chat_history] |
|
|
|
return new_chat_history |
|
|
|
|
|
def respond(message, chat_history): |
|
global chat_history_engine |
|
global result_metadata |
|
|
|
|
|
if not chat_history: |
|
chat_history = [[message, ""]] |
|
else: |
|
|
|
chat_history.append([message, ""]) |
|
|
|
chat_history_engine = summarize_assistant_messages(chat_history_engine) |
|
|
|
response = chat_engine.stream_chat(message, chat_history=chat_history_engine) |
|
|
|
|
|
for text in response.response_gen: |
|
chat_history[-1][1] += text |
|
yield "", chat_history |
|
|
|
print("----------") |
|
print(memory.get_all()) |
|
|
|
|
|
|
|
|
|
gr.Markdown(""" |
|
# Asistente Pharma.IA |
|
Realiza tus consultas sobre nuestras actividades y servicios |
|
""") |
|
|
|
with gr.Row(): |
|
with gr.Column(): |
|
chatbot = gr.Chatbot(show_label=False, show_copy_button=True, ) |
|
pregunta = gr.Textbox(show_label=False, autofocus=True, placeholder="Realiza tu consulta...") |
|
pregunta.submit(respond, [pregunta, chatbot], [pregunta, chatbot]) |
|
|
|
with gr.Row(): |
|
btn_send = gr.Button(value="Preguntar", variant="primary") |
|
clear = gr.Button(value="Limpiar") |
|
|
|
|
|
btn_send.click(respond, [pregunta, chatbot], [pregunta, chatbot]) |
|
|
|
clear.click(refresh, inputs=[chatbot], outputs=[chatbot]) |
|
|
|
|
|
|
|
|
|
demo.queue() |
|
demo.launch() |