import gradio as gr
import requests
import json
from decouple import config
# Function to interact with the Vectara API
def query_vectara(question, uploaded_file):
    # Handle file upload to Vectara
    customer_id = config('CUSTOMER_ID')  # Read from .env file
    corpus_id = config('CORPUS_ID')  # Read from .env file
    api_key = config('API_KEY')  # Read from .env file
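    # Expected .env entries (key names taken from the config() calls above; values are placeholders):
    #   CUSTOMER_ID=<your Vectara customer id>
    #   CORPUS_ID=<your Vectara corpus id>
    #   API_KEY=<your Vectara API key>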
url = f"https://api.vectara.io/v1/upload?c={customer_id}&o={corpus_id}" | |
post_headers = { | |
"x-api-key": api_key, | |
"customer-id": customer_id | |
} | |
    files = {
        # Assumes gr.File yields a file-like object with a .name attribute
        "file": (uploaded_file.name, uploaded_file),
        "doc_metadata": (None, json.dumps({"metadata_key": "metadata_value"})),  # Replace with your metadata
    }
    response = requests.post(url, files=files, verify=True, headers=post_headers)
    if response.status_code == 200:
        upload_status = "File uploaded successfully"
    else:
        upload_status = "Failed to upload the file"
    # Use the text box input as the user's message
    user_message = question
    query_body = {
        "query": [
            {
                "query": user_message,  # Use the user's message as the query
                "start": 0,
                "numResults": 10,
                "corpusKey": [
                    {
                        "customerId": customer_id,
                        "corpusId": corpus_id,
                        "lexicalInterpolationConfig": {"lambda": 0.025}
                    }
                ]
            }
        ]
    }
    api_endpoint = "https://api.vectara.io/v1/query"
return f"{upload_status}\n\nResponse from Vectara API: {response.text}" | |
# from vectara_retriever import VectaraRetriever  # Assuming VectaraRetriever is in vectara_retriever.py
# Function to interact with the Vectara API (commented-out alternative using VectaraRetriever)
# def query_vectara(question, chat_history, uploaded_file):
#     # Handle file upload to Vectara
#
#     # Get the user's message from the chat history
#     user_message = chat_history[-1][0]
#
#     # Create a VectaraRetriever instance
#     retriever = VectaraRetriever(index=your_vectara_index)  # Replace with your VectaraIndex instance
#
#     # Create a QueryBundle with the user's message
#     query_bundle = QueryBundle(query_str=user_message)
#
#     # Retrieve the top k most similar nodes
#     top_k_nodes = retriever._retrieve(query_bundle)
#
#     # Format the nodes for display
#     responses = "\n".join([f"{node.node.text} (score: {node.score})" for node in top_k_nodes])
#
#     return f"{upload_status}\n\nResponse from Vectara API: {responses}"
# Create a Gradio interface with a text input, a file upload input, and a text output
iface = gr.Interface(
    fn=query_vectara,
    inputs=[gr.Textbox(label="Input Text"), gr.File(label="Upload a file")],
    outputs=gr.Textbox(label="Output Text"),
    title="Vectara Chatbot",
    description="Ask me anything using the Vectara API!"
)
iface.launch()
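# To run locally (a sketch, assuming the standard package names for the imports above):
#   pip install gradio requests python-decouple
#   python app.py  # or whatever filename this script is saved as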