|
from llama_index import Document, SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext |
|
from llama_index import download_loader |
|
from langchain.chat_models import ChatOpenAI |
|
from pathlib import Path |
|
import gradio as gr |
|
import sys |
|
import os |
|
|
|
# Data set names: each entry corresponds to csv/<name>.csv on disk and to a
# persisted vector index at index/<name>.json.
dataFiles = ["RetroApril","RetroMarch", "Snowflake", "Datadog", "Databricks", "SplunkProducts", "SplunkEnterprise"]



# In-memory map of data set name -> loaded GPTSimpleVectorIndex, populated by loadData().
cache = {}



# Prompt sizing for the LLM (positional PromptHelper args in this llama_index
# version: max input size 4096, output budget 256, max chunk overlap 20).
prompt_helper = PromptHelper(4096, 256, 20)

# Deterministic (temperature=0) gpt-3.5-turbo chat model wrapped for llama_index.
llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))

# Shared LLM configuration intended for both indexing and querying.
service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)
|
|
|
def indexFile(filePath):
    """Build a vector index from csv/<filePath>.csv and persist it to index/<filePath>.json.

    Parameters
    ----------
    filePath : str
        Base name of the data set (no directory, no extension).
    """
    # The CSV loader is fetched on demand from the llama-hub loader registry.
    PandasCSVReader = download_loader("PandasCSVReader")

    loader = PandasCSVReader()

    documents = loader.load_data(file=Path('./csv/' + filePath + '.csv'))

    # Pass the module-level service_context explicitly: without it,
    # from_documents builds the index with library-default LLM settings
    # instead of the configured gpt-3.5-turbo predictor / prompt helper.
    index = GPTSimpleVectorIndex.from_documents(documents, service_context=service_context)

    index.save_to_disk("index/" + filePath + '.json')
|
|
|
def loadData():
    """
    Load indices from disk for improved performance.

    For each name in ``dataFiles``: build and persist the index on first
    run (no saved copy on disk yet), then load it into the module-level
    ``cache`` dict keyed by data set name.
    """
    for file in dataFiles:

        print("Loading file " + file)

        indexFilePath = "index/" + file + '.json'

        if not os.path.exists(indexFilePath):

            # First run for this data set: build and persist the index.
            indexFile(file)

        # Load with the configured service_context so the cached index uses
        # the gpt-3.5-turbo predictor rather than library defaults
        # (load_from_disk forwards kwargs to the index constructor).
        cache[file] = GPTSimpleVectorIndex.load_from_disk(indexFilePath, service_context=service_context)
|
|
|
def chatbot(indexName, input_text):
    """Answer ``input_text`` against the pre-loaded index named ``indexName``.

    Looks up the index in the module-level ``cache`` (filled by loadData),
    runs a compact-mode query with the shared service context, and returns
    the plain response text.
    """
    selected = cache[indexName]

    result = selected.query(
        input_text,
        response_mode="compact",
        service_context=service_context,
    )

    return result.response
|
|
|
|
|
# Eagerly build/load every index at startup so each request is a lookup + query.
loadData()



# Gradio UI: data-set dropdown plus a free-text question box, wired to chatbot().
iface = gr.Interface(fn=chatbot,

inputs= [

gr.Dropdown(dataFiles,

type="value", value="RetroApril", label="Select Pulse Data"),

gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],

outputs="text",

title="NLP Demo for Chat Interface")





# Basic-auth protected launch; os.environ['LOGIN_PASS'] raises KeyError at
# startup if the variable is unset. share=False keeps the app local only.
iface.launch(auth=('axiamatic', os.environ['LOGIN_PASS']),

auth_message='For access, please check my Slack profile or contact me in Slack.',

share=False)