"""Streamlit page: upload a resume (PDF) and chat to find the right candidates."""

import streamlit as st
import os
import tempfile

from utils.llm import model_pipeline, load_memory, typewriter
from utils.utils import load_documents
from dotenv import load_dotenv

# Load environment variables (e.g. the LLM API key) from a local .env file.
load_dotenv()

st.title("Search the right candidates!")
if "messages" not in st.session_state:
    st.session_state.messages = []

if "memory" not in st.session_state:
    st.session_state["memory"] = load_memory()
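
# Resume upload: save the PDF to a temporary directory, then hand it to the
# document loader.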
uploaded_file = st.file_uploader("Choose a PDF...", type="pdf")
if uploaded_file is not None:
    temp_dir = tempfile.mkdtemp()

    # The entered name is only kept in session state; the file itself is
    # always written as 'uploaded_file.pdf'.
    file_name = st.text_input("Enter file name: ", "uploaded_file.pdf")
    st.session_state["file_name"] = file_name

    with open(os.path.join(temp_dir, "uploaded_file.pdf"), "wb") as f:
        f.write(uploaded_file.getvalue())

    load_documents(file_path=os.path.join(temp_dir, "uploaded_file.pdf"))
    st.session_state.messages.append(
        {"role": "assistant", "content": "I have loaded the resume."}
    )
    del uploaded_file  # drop the in-memory copy once the PDF is on disk
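
# Replay the conversation so far (the script reruns on every interaction).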
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
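
# Handle a new query: run it through the LLM pipeline and render the
# structured answer field by field.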
if query := st.chat_input("Whom are you looking for today?"):
    st.session_state.messages.append({"role": "user", "content": query})
    with st.chat_message("user"):
        st.markdown(query)

    with st.chat_message("assistant"):
        message_placeholder = st.empty()  # reserved for streamed output; unused below
        with st.spinner("Pulling out amazing candidates from behind the bushes..."):
            full_response = ""
            # Build the conversational chain with the shared memory and run
            # the user's query through it.
            chain = model_pipeline(st.session_state["memory"])
            response = chain.invoke(query)

            # Render each field of the response with a typewriter effect and
            # accumulate the text for the chat history.
            for key in response:
                st.markdown(f"##### :blue[{key}:] ")
                typewriter(response[key], key, speed=9)
                full_response += f"##### :blue[{key}:]\n{response[key]}\n"

    # Persist the assistant turn in both the visible transcript and the memory.
    st.session_state.messages.append({"role": "assistant", "content": full_response})
    st.session_state["memory"].save_context({"query": query}, {"output": full_response})