# Document Q&A chatbot: LlamaIndex vector index over ./docs, served via Gradio.
import os

import gradio as gr
import openai
from llama_index.core import (
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    load_index_from_storage,
)
# Configure the OpenAI API key from the environment.
# NOTE(security): the original source hard-coded a literal "sk-proj-..." key
# here. A key committed to source control must be treated as compromised —
# revoke it and supply a fresh one via the OPENAI_API_KEY environment variable.
openai.api_key = os.environ.get("OPENAI_API_KEY", "")

# Directory where the serialized vector index is persisted between runs.
PERSIST_DIR = "./storage"

# Build the index from ./docs on first run; afterwards reload it from disk
# so startup does not re-embed every document.
if not os.path.exists(PERSIST_DIR):
    documents = SimpleDirectoryReader("docs").load_data()
    index = VectorStoreIndex.from_documents(documents, show_progress=True)
    index.storage_context.persist(persist_dir=PERSIST_DIR)
else:
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

# Query engine used by the chatbot to answer questions over the index.
query_engine = index.as_query_engine()
def chatbot_func(query, *args, **kwargs):
    """Answer *query* against the document index and return the answer text.

    Extra positional/keyword arguments (e.g. the chat history that
    gr.ChatInterface passes) are accepted and ignored.
    """
    return str(query_engine.query(query))
# Chat UI over chatbot_func; gr.ChatInterface wires up the message box and
# history handling around the single-turn query function.
iface = gr.ChatInterface(
    chatbot_func,
    chatbot=gr.Chatbot(
        height=800,
        placeholder="<strong>GPT-Based Chatbot</strong><br>Ask Me Anything",
    ),
)
# share=True opens a public Gradio tunnel URL — presumably intentional for a
# hosted Space; disable it for private/local deployments.
iface.launch(share=True)