from gpt_index import Document, SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
from langchain import OpenAI
import openai
import gradio as gr
import sys
import os

# Pre-load each saved vector index once at startup so queries can reuse them.
cache = {}
cache["RetroFeedback"] = GPTSimpleVectorIndex.load_from_disk('RetroFeedback.json')
cache["Snowflake"] = GPTSimpleVectorIndex.load_from_disk('Snowflake.json')
cache["Datadog"] = GPTSimpleVectorIndex.load_from_disk('Datadog.json')
cache["Databricks"] = GPTSimpleVectorIndex.load_from_disk('Databricks.json')


def chatbot(indexName, input_text):
    """
    Return the model's answer to input_text, using the vector index selected
    by indexName.
    """
    index = cache[indexName]
    response = index.query(input_text, response_mode="compact")
    return response.response


# Gradio UI: a dropdown to pick the data set and a textbox for the question.
iface = gr.Interface(fn=chatbot,
                     inputs=[
                         gr.Dropdown(["RetroFeedback", "Snowflake", "Datadog", "Databricks"],
                                     type="value", value="RetroFeedback", label="Select Pulse Data"),
                         gr.Textbox(lines=7, label="Ask any question", placeholder='What is the summary?')],
                     outputs="text",
                     title="NLP Demo for Chat Interface")

# Launch with basic auth; the password is read from the LOGIN_PASS environment variable.
iface.launch(auth=('axiamatic', os.environ['LOGIN_PASS']),
             auth_message='For access, please check my Slack profile or contact me in Slack.',
             share=False)