# --- Original notebook prototype (kept commented out for reference) ---
# # Cell 2 - Login Setup
# from huggingface_hub import login
# from dotenv import load_dotenv
# import os
#
# load_dotenv()
# login(token=os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)
#
# from embedding import embeddings
# from db.chroma import load_and_setup_db, search_cases
# from chat.hermes_llm import ChatManager
#
# VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH")
# vector_store = load_and_setup_db(VECTOR_DB_PATH, embeddings)
#
# query = "something"
# result = search_cases(vectorstore=vector_store, query=query, k=1)
#
# legal_chat = ChatManager(temperature=0.1)
# response = legal_chat.get_response(result[0]['content'], query=query)
import gradio as gr
import os
from huggingface_hub import login
from dotenv import load_dotenv
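# `embedding`, `db.chroma`, and `chat.hermes_llm` are local modules bundled with this
# Space (the embedding model, Chroma vector-store helpers, and the chat manager).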
from embedding import embeddings
from db.chroma import load_and_setup_db, search_cases
from chat.hermes_llm import ChatManager
# Load environment variables
load_dotenv()
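# HUGGINGFACEHUB_API_TOKEN and VECTOR_DB_PATH are expected as environment variables
# (e.g. Space secrets or a local .env file picked up by load_dotenv above).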
# Login to Hugging Face
login(token=os.getenv("HUGGINGFACEHUB_API_TOKEN"), add_to_git_credential=True)
# Initialize components
VECTOR_DB_PATH = os.getenv("VECTOR_DB_PATH")
vector_store = load_and_setup_db(VECTOR_DB_PATH, embeddings)
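# A low temperature (0.1) keeps generation close to deterministic, which suits
# answers that should stay grounded in the retrieved case text.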
legal_chat = ChatManager(temperature=0.1)
def process_query(query, chat_history):
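    """Retrieve the most relevant case for `query` and answer in its context.

    Searches the Chroma vector store, asks the chat manager to respond using the
    top match as context, and appends the (query, response) pair to the Gradio
    chat history. Returns an empty string (to clear the input box) and the
    updated history, matching the outputs wired up below.
    """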
    try:
        # Retrieve the single most relevant case for the query
        results = search_cases(vectorstore=vector_store, query=query, k=1)

        if len(results) > 0:
            # Answer the query using the retrieved case as context
            response = legal_chat.get_response(results[0]['content'], query=query)
        else:
            response = "No matching documents found."

        # Append the exchange to the history and clear the input box
        chat_history.append((query, response))
        return "", chat_history
    except Exception as e:
        return "", chat_history + [(query, f"Error: {str(e)}")]
# Create Gradio interface
with gr.Blocks(title="Legal Chat Assistant") as demo:
gr.Markdown("# Legal Chat Assistant")
gr.Markdown("Ask questions about legal cases and get AI-powered responses.")
chatbot = gr.Chatbot(
[],
elem_id="chatbot",
bubble_full_width=False,
height=400
)
with gr.Row():
query_input = gr.Textbox(
placeholder="Enter your query here...",
show_label=False,
scale=4
)
submit_btn = gr.Button("Send", scale=1)
# Set up event handlers
submit_btn.click(
process_query,
inputs=[query_input, chatbot],
outputs=[query_input, chatbot]
)
query_input.submit(
process_query,
inputs=[query_input, chatbot],
outputs=[query_input, chatbot]
)
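# Note: share=True asks Gradio to create a temporary public link; when this file runs
# inside a Hugging Face Space the app is already hosted publicly, so the flag is optional.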
if __name__ == "__main__":
demo.launch(share=True)