# pulseDemo / app.py
import os
from pathlib import Path
import gradio as gr
from llama_index import VectorStoreIndex, StorageContext, download_loader, load_index_from_storage
dataFiles = ["RetroSep", "RetroAug", "RetroJune", "OnCall", "RetroMay", "RetroApril", "RetroMarch"]
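# In-memory cache of loaded indices, keyed by data file name and populated by loadData().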
cache = {}
def index_file(filePath, index_root):
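    """
    Build a VectorStoreIndex for one data file, preferring a CSV source and
    falling back to a PDF, then persist the index under index_root for reuse.
    """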
csv_file = f'./raw/{filePath}.csv'
pdf_file = f'./raw/{filePath}.pdf'
documents = None
storage_context = StorageContext.from_defaults()
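    # Pick a loader based on which source file exists: CSV first, then PDF.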
if os.path.exists(csv_file):
PandasCSVReader = download_loader("PandasCSVReader")
loader = PandasCSVReader()
documents = loader.load_data(file=csv_file)
print(f"Loading from CSV {csv_file}")
elif os.path.exists(pdf_file):
PDFReader = download_loader("PDFReader")
loader = PDFReader()
documents = loader.load_data(file=Path(pdf_file))
# PyMuPDFReader = download_loader("PyMuPDFReader")
# loader = PyMuPDFReader()
# documents = loader.load(file_path=Path(pdf_file), metadata=False)
print(f"Loading from PDF {pdf_file}")
    if documents is None:
        raise FileNotFoundError(f"No CSV or PDF source found for {filePath} under ./raw")
    index = VectorStoreIndex.from_documents(documents=documents, storage_context=storage_context)
save_location = f"{index_root}/{filePath}"
if not os.path.exists(save_location):
os.makedirs(save_location)
storage_context.persist(save_location)
return index
def loadData():
"""
    Build a vector index for each data file on the first run and persist it;
    on subsequent runs, load the persisted index from disk for faster startup.
"""
index_root = "./index_v2"
for file in dataFiles:
index_file_path = f'{index_root}/{file}'
index = None
if not os.path.exists(index_file_path):
print("Creating index " + index_file_path)
index = index_file(file, index_root)
else:
print("Loading from existing index " + index_file_path)
storage_context = StorageContext.from_defaults(persist_dir=index_file_path)
index = load_index_from_storage(storage_context)
cache[file] = index
def chatbot(indexName, input_text):
"""
    Answer a question by querying the vector index for the selected dataset.
"""
index = cache[indexName]
response = index.as_query_engine().query(input_text)
return response.response
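# Build or load all indices once at startup so every query hits an in-memory index.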
loadData()
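# Gradio UI: dataset dropdown plus a free-form question box, returning a text response.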
iface = gr.Interface(fn=chatbot,
inputs=[
gr.Dropdown(dataFiles,
type="value", value="RetroSep", label="Select Pulse Data"),
gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],
outputs=gr.Textbox(lines=13, label="Response"),
title="NLP Demo for Chat Interface")
if 'LOGIN_PASS' in os.environ:
iface.launch(auth=('axiamatic', os.environ['LOGIN_PASS']),
auth_message='For access, please check my Slack profile or contact me in Slack.',
share=False)
else:
iface.launch(share=False)