your-ai-solution's picture
Update app.py
45e9b01 verified
raw
history blame
2.57 kB
import gradio as gr
import os
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS
import requests
# Paths
PROCESSED_DATA_DIR = "data/preprocessed/"  # directory containing the saved FAISS index files
HUGGINGFACE_KEY_FILE = "configs/huggingface_api_key.txt"  # first line of this file holds the API token
API_URL = "https://api-inference.huggingface.co/models/gpt2"  # hosted inference endpoint for GPT-2
# Function to read the Hugging Face API key
def read_huggingface_api_key(key_file):
    """Return the Hugging Face API token stored on the first line of *key_file*.

    Parameters
    ----------
    key_file : str
        Path to a text file whose first line is the API token.

    Returns
    -------
    str
        The token with surrounding whitespace stripped.

    Raises
    ------
    FileNotFoundError
        If *key_file* does not exist.
    RuntimeError
        For any other I/O problem while reading the file; the original
        exception is chained so the traceback is preserved.
    """
    try:
        with open(key_file, "r", encoding="utf-8") as file:
            return file.readline().strip()
    except FileNotFoundError:
        # Re-raise with a path-specific message for easier debugging.
        raise FileNotFoundError(f"API key file not found: {key_file}")
    except OSError as e:
        # RuntimeError is still an Exception subclass, so existing broad
        # handlers keep working; `from e` preserves the original cause.
        raise RuntimeError(f"Error reading API key: {e}") from e
# Read the token once at import time and build the auth header reused by every API call.
HUGGINGFACE_API_KEY = read_huggingface_api_key(HUGGINGFACE_KEY_FILE)
headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
# Load FAISS Index
def load_faiss_index(processed_data_dir):
    """Load the persisted FAISS vector store from *processed_data_dir*.

    Queries are embedded with the all-MiniLM-L6-v2 sentence-transformer.
    Deserialization is explicitly allowed because the index is trusted
    local data produced by this project's preprocessing step.
    """
    embeddings = HuggingFaceEmbeddings(
        model_name="sentence-transformers/all-MiniLM-L6-v2"
    )
    return FAISS.load_local(
        processed_data_dir,
        embeddings,
        allow_dangerous_deserialization=True,
    )
# Build the vector store once at import time; it is reused by every request.
vector_store = load_faiss_index(PROCESSED_DATA_DIR)
# Query GPT-2 Model via Hugging Face API
def query_huggingface_api(prompt, timeout=30):
    """Send *prompt* to the hosted GPT-2 inference endpoint and return its output.

    Parameters
    ----------
    prompt : str
        Text passed as the model's input.
    timeout : float, optional
        Seconds to wait for the API response. Without a timeout,
        ``requests.post`` can block indefinitely and hang the UI.

    Returns
    -------
    str
        The generated text on success; otherwise a human-readable
        error string (consistent with the HTTP-error path below).
    """
    try:
        response = requests.post(
            API_URL, headers=headers, json={"inputs": prompt}, timeout=timeout
        )
    except requests.RequestException as e:
        # Network failures should surface as a message in the UI, not a crash.
        return f"Error: request failed ({e})"
    if response.status_code == 200:
        return response.json()[0]["generated_text"]
    else:
        return f"Error {response.status_code}: {response.text}"
# Generate Response
def generate_response(query):
    """Answer *query* via RAG: retrieve context from FAISS, then ask GPT-2.

    Returns the model's answer string, or a notice when no documents match.
    """
    docs = vector_store.as_retriever().get_relevant_documents(query)
    if not docs:
        return "No relevant documents found."
    # Keep at most three chunks and cap the combined context at 1500
    # characters so the prompt stays within the model's input budget.
    context = "\n\n".join(doc.page_content for doc in docs[:3])[:1500]
    prompt = (
        f"You are a legal expert specializing in business laws and the legal environment. "
        f"Using the following context, answer the question concisely and accurately.\n\n"
        f"Context:\n{context}\n\nQuestion: {query}\n\nAnswer:"
    )
    return query_huggingface_api(prompt)
# Gradio Interface for QA Bot
def qa_bot(query):
    """Gradio callback: delegate the user's question to the RAG pipeline."""
    answer = generate_response(query)
    return answer
# Define Gradio Interface
demo = gr.Interface(
    fn=qa_bot,  # callback invoked once per submitted question
    inputs=gr.Textbox(label="Enter your question about business laws:"),
    outputs=gr.Textbox(label="Answer"),
    title="Business Law QA Bot",
    description="Ask questions about business laws and the legal environment. Powered by FAISS and GPT-2.",
)
# Launch the web app only when run as a script (not when imported as a module).
if __name__ == "__main__":
    demo.launch()