import os

import streamlit as st
from embedchain import App

os.environ["HF_HOME"] = "./models"
#! PROVIDE HUGGINGFACE TOKEN IF RUNNING OFFLINE
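
# Build the embedchain App from the local YAML config file.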
def conversational_ai():
    return App.from_config(config_path="./config_main.yaml")
st.title('Demo of "AI Chatbot in Law"')
st.caption(
    "A demo of conversational AI for Dhirubhai Ambani Centre for Technology and Law (DA-CTL) made by **Anurag Shukla** and **Tanaz Pathan** under the guidance of **Prof. Prasenjit Majumder**"
)
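
# Seed the chat history with an initial assistant greeting.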
if "messages" not in st.session_state:
    st.session_state.messages = [
        {
            "role": "assistant",
            "content": (
                "Hi! I'm a conversational AI specializing in the Indian legal "
                "system. How may I assist you today?"
            ),
        }
    ]
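
# Replay the stored conversation on every Streamlit rerun.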
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
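
# Handle a newly submitted prompt from the chat input.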
if prompt := st.chat_input("Disclaimer: I am still a product in development"):
    app = conversational_ai()
    # app.reset()
    # print(len(app.db.get()["metadatas"]))
    # print(len(app.get_data_sources()))
    # quit()
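
    # Echo the user's message and record it in the session history.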
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})
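
    # Retrieve supporting contexts and generate the assistant's reply.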
    with st.chat_message("assistant"):
        msg_placeholder = st.empty()
        msg_placeholder.markdown("Thinking...")

        print("Querying the Agent.\n")
        cntxt = app.search(prompt)
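        # Keep only the retrieved contexts whose score is at or below the 1.2 threshold.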
        relevant_c = [i["context"] for i in cntxt if i["metadata"]["score"] <= 1.2]
        print(
            "\n===================\n",
            *relevant_c,
            sep="\n===================\n",
        )
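        # Query the LLM only when at least one context passed the filter;
        # otherwise fall back to a canned reply.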
        if len(relevant_c) != 0:
            full_response = app.llm.query(
                input_query=prompt,
                contexts=relevant_c,
            )
            full_response = full_response.rpartition("Answer:")[-1].strip()
        else:
            full_response = (
                "Sorry, but I don't have relevant knowledge to answer that query."
            )
        print(f"\n#ANSWER\n\n{full_response}")
        msg_placeholder.markdown(full_response)
    st.session_state.messages.append(
        {"role": "assistant", "content": full_response}
    )