|
|
|
|
|
import os |
|
import gradio as gr |
|
import openai |
|
import logging |
|
from pinecone import Pinecone, ServerlessSpec |
|
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext |
|
from llama_index.vector_stores.pinecone import PineconeVectorStore |
|
|
|
|
|
# Emit INFO-level logs from this script and the underlying libraries.
logging.basicConfig(level=logging.INFO)

# Fail fast with an actionable message if a required credential is missing
# or empty, instead of dying later (or with an opaque KeyError) deep inside
# the OpenAI / Pinecone client libraries.
for _required_var in ("OPENAI_API_KEY", "PINECONE_API_KEY"):
    if not os.environ.get(_required_var):
        raise RuntimeError(f"Required environment variable {_required_var!r} is not set")
|
|
|
|
|
# Connect to Pinecone using the API key validated above.
pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])

index_name = "quickstart"

# NOTE(review): destructive — if a "quickstart" index already exists it is
# deleted (along with all stored vectors) so every run starts from scratch.
if index_name in [idx["name"] for idx in pc.list_indexes()]:

    pc.delete_index(index_name)

# Create a fresh serverless index. dimension=1536 matches the output size of
# OpenAI's default text-embedding model — presumably what LlamaIndex uses
# here; TODO confirm against the embedding configuration.
pc.create_index(

    name=index_name,

    dimension=1536,

    metric="euclidean",

    spec=ServerlessSpec(cloud="aws", region="us-east-1"),

)

# Handle to the newly created index, handed to the LlamaIndex vector store.
pinecone_index = pc.Index(index_name)
|
|
|
|
|
# Load every readable file under ./data into memory as LlamaIndex documents.
documents = SimpleDirectoryReader("./data").load_data()

# Wire LlamaIndex to Pinecone: embeddings computed from the documents are
# persisted in the Pinecone index rather than in an in-process store.
vector_store = PineconeVectorStore(pinecone_index=pinecone_index)

storage_context = StorageContext.from_defaults(vector_store=vector_store)

# Embeds and upserts all documents — this makes network calls to both the
# embedding API and Pinecone, so it can take a while for large corpora.
index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)

# Retrieval + synthesis engine used by the Gradio callback below.
query_engine = index.as_query_engine()
|
|
|
|
|
def query_document(user_query):
    """Answer *user_query* against the indexed documents.

    Gradio callback: takes the input-textbox contents and returns the
    answer as a plain string for the output textbox.
    """
    return str(query_engine.query(user_query))
|
|
|
|
|
# Web UI: a single question box wired through query_document to an answer box.
question_box = gr.Textbox(label="Enter your query", placeholder="Ask something about the essay...")
answer_box = gr.Textbox(label="Response")

interface = gr.Interface(
    fn=query_document,
    inputs=question_box,
    outputs=answer_box,
    title="Ask Paul Graham (Powered by LlamaIndex + Pinecone)",
)
|
|
|
# Start the Gradio web server only when executed as a script, not on import.
if __name__ == "__main__":

    interface.launch()