# pulseDemo / app.py
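# Gradio demo: answers questions about a set of pre-indexed CSV data sets using
# LlamaIndex GPT vector indices. Each index is built from ./csv/<name>.csv on first
# run and cached on disk under ./index/<name>.json.
# Assumes OPENAI_API_KEY is set in the environment for LlamaIndex's default
# OpenAI-backed models, and LOGIN_PASS is set for the Gradio login (see iface.launch below).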
from pathlib import Path
import os

import gradio as gr
from llama_index import GPTSimpleVectorIndex, download_loader
# CSV data sets available for querying; each is indexed and cached on first load.
dataFiles = ["RetroMay", "SupplierScoreCardWithGroup3", "Agreement", "RetroApril", "RetroMarch",
             "Snowflake", "Datadog", "Databricks", "SplunkProducts", "SplunkEnterprise"]

# In-memory cache of loaded GPTSimpleVectorIndex objects, keyed by data set name.
cache = {}
def indexFile(filePath):
    """
    Build a vector index from a CSV file and persist it to disk.
    """
    # PandasCSVReader loads each CSV row as a Document via pandas.
    PandasCSVReader = download_loader("PandasCSVReader")
    loader = PandasCSVReader()
    documents = loader.load_data(file=Path('./csv/' + filePath + '.csv'))
    index = GPTSimpleVectorIndex.from_documents(documents)
    index.save_to_disk("index/" + filePath + '.json')
def loadData():
    """
    Load indices from disk for improved performance, building any that are missing.
    """
    for file in dataFiles:
        print("Loading file " + file)
        indexFilePath = "index/" + file + '.json'
        if not os.path.exists(indexFilePath):
            indexFile(file)
        cache[file] = GPTSimpleVectorIndex.load_from_disk(indexFilePath)
def chatbot(indexName, input_text):
    """
    Answer a question against the selected data set's cached index.
    """
    index = cache[indexName]
    response = index.query(input_text, response_mode="compact")
    return response.response
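# Example usage (hypothetical), once loadData() below has populated the cache:
#   chatbot("Agreement", "What is the summary?")  # returns the answer as plain text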
loadData()
iface = gr.Interface(fn=chatbot,
                     inputs=[
                         gr.Dropdown(dataFiles,
                                     type="value", value="Agreement", label="Select Pulse Data"),
                         gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],
                     outputs="text",
                     title="NLP Demo for Chat Interface")
iface.launch(auth=('axiamatic', os.environ['LOGIN_PASS']),
             auth_message='For access, please check my Slack profile or contact me in Slack.',
             share=False)