import os

import dotenv
import gradio as gr
from openai import OpenAI
from upstash_vector import Index

# Load environment variables from a local .env file, if present
dotenv.load_dotenv()

# Services
class ConfigService:
    @staticmethod
    def load_config():
        return {
            "OPENAI_API_KEY": os.getenv("OPENAI_API_KEY"),
            "UPSTASH_VECTOR_REST_URL": os.getenv("UPSTASH_VECTOR_REST_URL"),
            "UPSTASH_VECTOR_REST_TOKEN": os.getenv("UPSTASH_VECTOR_REST_TOKEN"),
        }

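# Optional hardening -- a minimal sketch, not part of the original app:
# fail fast with a clear message when a required variable is unset,
# instead of failing later inside the OpenAI or Upstash client.
def validate_config(config):
    missing = [key for key, value in config.items() if not value]
    if missing:
        raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")
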
class ClientService:
    @staticmethod
    def initialize_clients(config):
        return {
            "index": Index(
                url=config["UPSTASH_VECTOR_REST_URL"],
                token=config["UPSTASH_VECTOR_REST_TOKEN"],
            ),
            "openai": OpenAI(api_key=config["OPENAI_API_KEY"]),
        }

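# Note: the upstash_vector SDK also offers Index.from_env(), which reads the
# same UPSTASH_VECTOR_REST_URL / UPSTASH_VECTOR_REST_TOKEN variables itself;
# the explicit constructor above is kept to match the original code.
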
class EmbeddingService:
    @staticmethod
    def get_embedding(content, client):
        # Embed the text with OpenAI; the response carries one embedding
        # per input, so a single string yields data[0].
        response = client.embeddings.create(
            input=content,
            model="text-embedding-3-large",
        )
        return response.data[0].embedding

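# Note: text-embedding-3-large produces 3072-dimensional vectors by default,
# so the Upstash index must have been created with dimension 3072 (cosine
# similarity is the usual metric for OpenAI embeddings).
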
class VectorSearchService:
    @staticmethod
    def search(index, vector, top_k=2):
        # Nearest-neighbour lookup in Upstash Vector; include_metadata=True
        # returns the metadata stored alongside each vector.
        return index.query(vector=vector, top_k=top_k, include_metadata=True)

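# Sketch of the result shape (assumed usage, not executed by the app):
#   results = VectorSearchService.search(clients["index"], embedding)
#   results[0].id, results[0].score, results[0].metadata
# Matches come back sorted by score, highest first.
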
class PromptService:
    @staticmethod
    def load_system_prompt(file_path):
        with open(file_path, "r") as file:
            return file.read().strip()

    @staticmethod
    def create_chat_prompt(question, context):
        return f"Question: {question}\n\nContext: {context}"

class ChatService:
    def __init__(self, clients, system_prompt):
        self.clients = clients
        self.messages = [{"role": "system", "content": system_prompt}]

    def ask_question(self, question):
        # Retrieve context: embed the question, then pull the closest chunks.
        question_embedding = EmbeddingService.get_embedding(question, self.clients["openai"])
        search_results = VectorSearchService.search(self.clients["index"], question_embedding)

        # Keep only reasonably similar matches; 0.7 is a heuristic cutoff.
        relevant_chunks = [r.metadata["content"] for r in search_results if r.score > 0.7]
        context = "\n".join(relevant_chunks)

        final_prompt = PromptService.create_chat_prompt(question, context)
        self.messages.append({"role": "user", "content": final_prompt})

        chat_completion = self.clients["openai"].chat.completions.create(
            model="gpt-4o-mini",
            messages=self.messages,
        )
        response_text = chat_completion.choices[0].message.content
        self.messages.append({"role": "assistant", "content": response_text})
        return response_text

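# Caveat: self.messages grows with every turn and will eventually overflow
# the model's context window. A minimal trimming sketch (MAX_TURNS is an
# assumed constant, not from the original code):
MAX_TURNS = 20

def trim_history(messages, max_turns=MAX_TURNS):
    # Keep the system prompt plus the most recent `max_turns` exchanges
    # (one user + one assistant message per exchange).
    return messages[:1] + messages[1:][-2 * max_turns:]
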
# Initialize services
config = ConfigService.load_config()
clients = ClientService.initialize_clients(config)
system_prompt = PromptService.load_system_prompt("prompts/sys.md")
chat_service = ChatService(clients, system_prompt)

# Gradio interface
def chatbot(message, history):
    # ChatService keeps its own conversation state, so Gradio's `history`
    # argument is intentionally unused. Note that this single ChatService
    # instance is shared by every visitor to the Space.
    response = chat_service.ask_question(message)
    return response

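# Defensive variant (sketch): surface API failures as a Gradio error toast
# instead of crashing the worker. To use it, pass fn=chatbot_safe below.
def chatbot_safe(message, history):
    try:
        return chat_service.ask_question(message)
    except Exception as exc:
        raise gr.Error(f"Request failed: {exc}")
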
demo = gr.ChatInterface(
    fn=chatbot,
    title="Vector Database Cloud Chatbot",
    description="Ask questions about Vector Database Cloud",
    theme="default",
    examples=[
        "What is Vector Database Cloud?",
        "How does one-click deployment work?",
        "What vector databases are supported?",
    ],
)

if __name__ == "__main__":
    demo.launch()