Spaces: Runtime error
# app.py
import os
import gradio as gr
import openai
import logging
from pinecone import Pinecone, ServerlessSpec
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext
from llama_index.vector_stores.pinecone import PineconeVectorStore

# 1. Configure Logging
logging.basicConfig(level=logging.INFO)

# 2. Set API Keys (use environment variables for security, e.g. Space secrets)
openai.api_key = os.environ["OPENAI_API_KEY"]      # raises KeyError if the key is not set
pinecone_api_key = os.environ["PINECONE_API_KEY"]
# 3. Initialize Pinecone
pc = Pinecone(api_key=pinecone_api_key)
index_name = "quickstart"

# Delete the index if it already exists, then create it fresh
if index_name in [idx["name"] for idx in pc.list_indexes()]:
    pc.delete_index(index_name)

pc.create_index(
    name=index_name,
    dimension=1536,
    metric="euclidean",
    spec=ServerlessSpec(cloud="aws", region="us-east-1"),
)
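# Note: dimension=1536 is sized for OpenAI's text-embedding-ada-002, the default
# embedding model in LlamaIndex; if you switch embedding models, the index
# dimension must match that model's vector size.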
pinecone_index = pc.Index(index_name)

# 4. Load Documents
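# The ./data folder must exist in the repo and contain at least one readable
# file; SimpleDirectoryReader raises an error at startup if it is missing or empty.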
documents = SimpleDirectoryReader("./data").load_data()
# 5. Create Vector Index
vector_store = PineconeVectorStore(pinecone_index=pinecone_index)
storage_context = StorageContext.from_defaults(vector_store=vector_store)
index = VectorStoreIndex.from_documents(documents, storage_context=storage_context)
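# from_documents() chunks the documents, embeds each chunk with the OpenAI
# embedding model, and upserts the resulting vectors into the Pinecone index.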
# 6. Create Query Engine
query_engine = index.as_query_engine()
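# For each query, the engine retrieves the most similar chunks from Pinecone and,
# by default, uses an OpenAI chat model to synthesize the final answer.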
# 7. Query Function
def query_document(user_query):
    response = query_engine.query(user_query)
    return str(response)

# 8. Gradio App
interface = gr.Interface(
    fn=query_document,
    inputs=gr.Textbox(label="Enter your query", placeholder="Ask something about the essay..."),
    outputs=gr.Textbox(label="Response"),
    title="HR Chat Bot",
)

if __name__ == "__main__":
    interface.launch()
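Once the app is up, the same endpoint can also be exercised programmatically with the gradio_client package. Below is a minimal sketch; it assumes the app is reachable at Gradio's default local address and uses the default "/predict" endpoint that gr.Interface exposes, so adjust the address (or pass the Space id) for a deployed Space.

# smoke_test.py - query the running Gradio app (pip install gradio_client)
from gradio_client import Client

# Assumed address: Gradio's default local host/port; replace with the deployed
# Space id (e.g. "username/space-name") to target the hosted app instead.
client = Client("http://127.0.0.1:7860")

# gr.Interface registers a single endpoint named "/predict" by default.
result = client.predict("What does the essay say about hiring?", api_name="/predict")
print(result)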