# app.py — Business Law QA bot: FAISS retrieval over preprocessed documents,
# answered with GPT-2 via the Hugging Face Inference API.
import gradio as gr
import os
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
import requests
# Paths
# Directory containing the pre-built FAISS index files (index.faiss / index.pkl).
PROCESSED_DATA_DIR = "data/preprocessed/"
# Hugging Face Inference API endpoint for the gpt2 text-generation model.
API_URL = "https://api-inference.huggingface.co/models/gpt2"
# Load Hugging Face API key from environment variable
HUGGINGFACE_API_KEY = os.getenv("HF_API_TOKEN")
if not HUGGINGFACE_API_KEY:
    # Fail fast at import time: every Inference API call below needs the token.
    raise ValueError("Hugging Face API token is not set. Please ensure HF_API_TOKEN is added as a secret.")
# Bearer-token header sent with every Inference API request.
headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
# Load FAISS Index
def load_faiss_index(processed_data_dir):
    """Load the persisted FAISS vector store with a MiniLM embedding model.

    Args:
        processed_data_dir: Directory holding the serialized FAISS index.

    Returns:
        The deserialized FAISS vector store, ready for similarity search.
    """
    embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"
    )
    # NOTE(review): allow_dangerous_deserialization unpickles the stored index;
    # acceptable only because the index files are produced by this project,
    # never by user uploads.
    return FAISS.load_local(
        processed_data_dir,
        embeddings,
        allow_dangerous_deserialization=True,
    )
# Build the vector store once at import time so every request reuses it.
vector_store = load_faiss_index(PROCESSED_DATA_DIR)
# Query GPT-2 Model via Hugging Face API
def query_huggingface_api(prompt):
    """
    Query the Hugging Face GPT-2 model via the Inference API.

    Args:
        prompt: Full prompt text sent as the model input.

    Returns:
        The generated text on success; otherwise a human-readable error
        string (the caller surfaces it directly in the UI).
    """
    try:
        # A timeout prevents a stuck request from hanging the Gradio worker
        # forever; network failures are reported instead of crashing the app.
        response = requests.post(
            API_URL, headers=headers, json={"inputs": prompt}, timeout=60
        )
    except requests.RequestException as exc:
        return f"Error: request to Hugging Face API failed ({exc})"
    if response.status_code == 200:
        payload = response.json()
        # A successful response is a list of {"generated_text": ...} dicts.
        # Guard the shape: the API returns a dict (e.g. {"error": "... loading"})
        # in other situations, which would previously raise KeyError/IndexError.
        if isinstance(payload, list) and payload and "generated_text" in payload[0]:
            return payload[0]["generated_text"]
        return f"Error: unexpected API response: {payload}"
    return f"Error {response.status_code}: {response.text}"
# Generate Response
def generate_response(query):
    """
    Answer *query* by retrieving context from FAISS and prompting GPT-2.

    Returns the model's answer, or a fallback message when retrieval
    finds nothing relevant.
    """
    documents = vector_store.as_retriever().get_relevant_documents(query)
    if not documents:
        return "No relevant documents found."

    # Keep at most three chunks and cap the joined context at 1500 characters
    # so the prompt stays within the model's input budget.
    context = "\n\n".join(doc.page_content for doc in documents[:3])[:1500]

    prompt = (
        f"You are a legal expert specializing in business laws and the legal environment. "
        f"Using the following context, answer the question concisely and accurately.\n\n"
        f"Context:\n{context}\n\nQuestion: {query}\n\nAnswer:"
    )
    return query_huggingface_api(prompt)
# Gradio Interface for QA Bot
def qa_bot(query):
    """
    Thin Gradio adapter: forward the user's question to generate_response.
    """
    answer = generate_response(query)
    return answer
# Gradio UI: a single question box mapped to a single answer box.
question_box = gr.Textbox(label="Enter your question:")
answer_box = gr.Textbox(label="Answer")

demo = gr.Interface(
    fn=qa_bot,
    inputs=question_box,
    outputs=answer_box,
    title="Business Law QA Bot",
    description="Ask questions about business laws and the legal environment. Powered by FAISS and GPT-2.",
)

if __name__ == "__main__":
    # Launch only when executed directly (Spaces also picks up `demo`).
    demo.launch()