# NOTE: removed web-view extraction artifacts (file-size line, commit hash, line-number gutter)
from llama_index import Document, SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, GPTTreeIndex, LLMPredictor, PromptHelper, ServiceContext
from llama_index import download_loader
from langchain import OpenAI
from pathlib import Path
import gradio as gr
import sys
import os
import logging
# Root-logger setup; level is overridable at runtime via the LOGLEVEL env var (defaults to DEBUG).
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=os.environ.get("LOGLEVEL", "DEBUG"))
#dataFiles = ["RetroApril","RetroMarch", "Snowflake", "Datadog", "Databricks", "SplunkProducts", "SplunkEnterprise"]
# Data-set names exposed in the UI; each name maps to ./csv/<name>.csv and treeIndex/<name>.json.
dataFiles = ["Lastpass", "RetroApril","RetroMarch"]
# In-memory map: data-set name -> loaded GPTTreeIndex (populated by loadData()).
cache = {}
def indexFile(filePath):
    """Build a GPTTreeIndex from ./csv/<filePath>.csv and persist it.

    The serialized index is written to treeIndex/<filePath>.json so later
    runs can load it from disk instead of re-indexing the CSV.

    :param filePath: data-set name (file stem, without directory or extension)
    """
    PandasCSVReader = download_loader("PandasCSVReader")
    loader = PandasCSVReader()
    # pathlib (already imported) instead of manual string concatenation
    documents = loader.load_data(file=Path("csv") / f"{filePath}.csv")
    index = GPTTreeIndex.from_documents(documents)
    index.save_to_disk(str(Path("treeIndex") / f"{filePath}.json"))
def loadData():
    """
    Load indices from disk for improved performance.

    For each name in dataFiles: build the tree index on first use (when the
    serialized JSON is missing), then load it and store it in the module-level
    `cache` keyed by data-set name.
    """
    for file in dataFiles:
        # use the configured logging setup rather than bare print()
        logging.info("Loading file %s", file)
        indexFilePath = os.path.join("treeIndex", file + ".json")
        if not os.path.exists(indexFilePath):
            # first run for this data set: index the CSV and persist it
            indexFile(file)
        cache[file] = GPTTreeIndex.load_from_disk(indexFilePath)
def chatbot(indexName, input_text):
    """
    Answer a user question against the selected data set.

    :param indexName: key into the module-level `cache` of loaded indices
    :param input_text: the user's free-form question
    :return: the index's textual answer
    """
    selected_index = cache[indexName]
    result = selected_index.query(input_text, response_mode="compact")
    return result.response
# Module-level logger (currently unused below; kept for future use).
log = logging.getLogger(__name__)
# Build/load every index up front so queries are answered from the cache.
loadData()
# Gradio UI: pick a data set from the dropdown and ask a free-form question.
iface = gr.Interface(fn=chatbot,
                     inputs= [
                        gr.Dropdown(dataFiles,
                            type="value", value="Lastpass", label="Select Pulse Data"),
                        gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],
                     outputs="text",
                     title="NLP Demo for Chat Interface")
iface.launch(share=False)