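"""Gradio chatbot that answers questions over a set of CSV data files using llama_index vector indices, one index per file, cached on disk."""
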
from llama_index import GPTSimpleVectorIndex, LLMPredictor, PromptHelper, ServiceContext
from llama_index import download_loader
from langchain.chat_models import ChatOpenAI
from pathlib import Path
import gradio as gr
import os

# Data sets available in the UI; each name maps to ./csv/<name>.csv and index/<name>.json
dataFiles = ["RetroApril", "RetroMarch", "Snowflake", "Datadog", "Databricks", "SplunkProducts", "SplunkEnterprise"]

# In-memory cache of loaded vector indices, keyed by data set name
cache = {}

# PromptHelper(max_input_size, num_output, max_chunk_overlap)
prompt_helper = PromptHelper(4096, 256, 20)
llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo"))
service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor, prompt_helper=prompt_helper)

def indexFile(filePath):
    """Build a vector index from ./csv/<filePath>.csv and save it to index/<filePath>.json."""
    PandasCSVReader = download_loader("PandasCSVReader")
    loader = PandasCSVReader()
    documents = loader.load_data(file=Path('./csv/' + filePath + '.csv'))
    index = GPTSimpleVectorIndex.from_documents(documents)
    index.save_to_disk("index/" + filePath + '.json')

def loadData():
    """
    Load all indices from disk into the cache, building any index that does not exist yet.
    """
    for file in dataFiles:
        print("Loading file " + file)
        indexFilePath = "index/" + file + '.json'
        if not os.path.exists(indexFilePath):
            indexFile(file)
        cache[file] = GPTSimpleVectorIndex.load_from_disk(indexFilePath)

def chatbot(indexName, input_text):
    """
    Answer input_text using the vector index selected by indexName.
    """
    index = cache[indexName]
    # "compact" response mode packs as many retrieved chunks as possible into each LLM call
    response = index.query(input_text, response_mode="compact", service_context=service_context)
    return response.response


# Load (or build) all indices before starting the UI
loadData()

iface = gr.Interface(fn=chatbot,
                     inputs=[
                         gr.Dropdown(dataFiles,
                                     type="value", value="RetroApril", label="Select Pulse Data"),
                         gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],
                     outputs="text",
                     title="NLP Demo for Chat Interface")


# Basic auth for the public UI; the password comes from the LOGIN_PASS environment variable
iface.launch(auth=('axiamatic', os.environ['LOGIN_PASS']),
             auth_message='For access, please check my Slack profile or contact me in Slack.',
             share=False)
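
# Example usage (sketch; the file name "app.py" is assumed, not taken from this script):
#   export OPENAI_API_KEY=...   # needed by ChatOpenAI and the embedding calls
#   export LOGIN_PASS=...       # password for the Gradio basic-auth login above
#   python app.py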