from pathlib import Path
from llama_index import ServiceContext, download_loader, GPTVectorStoreIndex, LLMPredictor
from llama_index import StorageContext, load_index_from_storage
from langchain import OpenAI
import gradio as gr
import os
# My personal OpenAI API key - do not misuse :P
os.environ["OPENAI_API_KEY"] = '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'
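# Alternative sketch (not used in this Space): export the key in your shell instead of
# hard-coding it, e.g. `export OPENAI_API_KEY=...` before launching, and skip the line above;
# os.environ will then already contain it when the script starts.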
# Maximum number of tokens the LLM may generate per response
num_outputs = 512

llm_predictor = LLMPredictor(llm=OpenAI(temperature=0.4, model_name="text-davinci-003", max_tokens=num_outputs))
service_context = ServiceContext.from_defaults(llm_predictor=llm_predictor)
def construct_index():
    # Load the CSV of articles and build a vector index over it
    SimpleCSVReader = download_loader("SimpleCSVReader")
    loader = SimpleCSVReader()
    docs = loader.load_data(file=Path('./docs/articles.csv'))
    index = GPTVectorStoreIndex.from_documents(docs, service_context=service_context)
    # persist() writes the index to ./storage by default, which chatbot() reads back
    index.storage_context.persist()
    return index
def chatbot(input_text):
    # Reload the persisted index and answer the query with a compact response
    storage_context = StorageContext.from_defaults(persist_dir=Path("./storage/"))
    index = load_index_from_storage(storage_context, service_context=service_context)
    query_engine = index.as_query_engine(response_mode='compact')
    response = query_engine.query(input_text)
    return response.response
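# Quick sanity check (a sketch, assuming ./storage already holds a persisted index):
# print(chatbot("What topics do the articles cover?"))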
# Builds and persists the index; comment this out after the first run once ./storage exists
index = construct_index()
iface = gr.Interface(fn=chatbot,
                     inputs=gr.inputs.Textbox(lines=7, label="Enter your text"),
                     outputs="text",
                     title="147 AI Chatbot")
iface.launch()
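# launch() serves the app on a local URL by default; passing share=True (a standard Gradio
# option, not used here) would also expose a temporary public link.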
# Alternative chat-style UI (kept for reference, currently disabled):
# with gr.Blocks() as demo:
#     chatbot = gr.Chatbot()
#     msg = gr.Textbox()
#     clear = gr.Button("Clear")
#
#     def respond(message, chat_history):
#         storage_context = StorageContext.from_defaults(persist_dir=Path("./storage/"))
#         index = load_index_from_storage(storage_context, service_context=service_context)
#         query_engine = index.as_query_engine(response_mode='compact')
#         response = query_engine.query(message)
#         chat_history.append((message, response.response))  # Chatbot expects plain strings
#         return "", chat_history
#
#     msg.submit(respond, [msg, chatbot], [msg, chatbot])
#     clear.click(lambda: None, None, chatbot, queue=False)
#
# demo.launch()