|
from typing import Set |
|
|
|
from backend.core import run_llm |
|
import streamlit as st |
|
from streamlit_chat import message |
|
from PIL import Image |
|
from io import BytesIO |
|
import base64 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sidebar: profile photo plus a short bio / contact card rendered as Markdown.
# The emoji below were previously mojibake (UTF-8 emoji bytes decoded through a
# legacy codepage) and have been restored; "Leetocde" link label fixed to "LeetCode".
profile_image = Image.open("Untitled design.png")

# NOTE(review): `use_column_width` is deprecated in newer Streamlit releases in
# favor of `use_container_width` — switch once the deployed Streamlit version is confirmed.
st.sidebar.image(profile_image, use_column_width=True)

st.sidebar.markdown(
    """
💻 MS CS @ IUB

🧳 Ex Amdocs SDE (3 Years of Experience)

🎯 Actively looking for full time SDE / SWE / Full Stack / Data Science roles starting immediately

📧 : anujmaha@iu.edu / anujsmahajan1998@gmail.com

💻 : Python, React, Java, Javascript, SpringBoot, AWS

📞 : +1 8126029653

🎯 I am a strong Full Stack and Software developer with diverse skills and currently exploring AWS and GenAI.

[![](https://content.linkedin.com/content/dam/me/business/en-us/amp/brand-site/v2/bg/LI-Bug.svg.original.svg)](https://www.linkedin.com/in/anujmaha/)

[![GitHub](https://img.icons8.com/material-outlined/48/000000/github.png)](https://github.com/anujmahajan98)

[LeetCode](https://leetcode.com/anujmah/)

"""
)
|
|
|
# Page headers. The emoji was previously mojibake-encoded; restored to 👨🏻‍💻.
st.header("Get to know Anuj 👨🏻‍💻")

st.header("Ask anything about me ")

# Initialize the per-session conversation buffers exactly once per session.
# (Consolidates three identical membership guards into a single loop.)
for _state_key in ("user_prompt_history", "chat_answers_history", "chat_history"):
    if _state_key not in st.session_state:
        st.session_state[_state_key] = []

# Free-text prompt box; a non-empty value triggers the generation block below.
prompt = st.text_input(
    "Prompt",
    placeholder="eg. Is it a good choice to hire him as SDE ?, What is his work experience ?...",
)
|
|
|
|
|
if prompt:
    # Send the new question to the LLM backend along with the accumulated
    # conversation so the model can answer with context.
    with st.spinner("Generating response..."):
        generated_response = run_llm(
            query=prompt, chat_history=st.session_state["chat_history"]
        )

    answer_text = generated_response["answer"]
    formatted_response = f"{answer_text}"

    # Record this turn in all three session buffers: the raw prompt, the
    # display-ready answer, and the (question, answer) pair for the backend.
    st.session_state["user_prompt_history"].append(prompt)
    st.session_state["chat_answers_history"].append(formatted_response)
    st.session_state["chat_history"].append((prompt, answer_text))
|
|
|
# Replay the whole conversation on each rerun. Each chat widget now gets an
# explicit unique `key`: without one, rendering two identical messages can
# collide on auto-generated widget IDs (DuplicateWidgetID) in Streamlit.
if st.session_state["chat_answers_history"]:
    for turn_index, (generated_response, user_query) in enumerate(
        zip(
            st.session_state["chat_answers_history"],
            st.session_state["user_prompt_history"],
        )
    ):
        message(
            user_query,
            is_user=True,
            avatar_style="adventurer",
            seed=123,
            key=f"user_msg_{turn_index}",
        )

        # Render the answer as raw HTML so long responses wrap instead of
        # overflowing the column.
        st.write(
            f'<div style="word-wrap: break-word;">{generated_response}</div>',
            unsafe_allow_html=True,
        )
|
|