import streamlit as st
from streamlit_option_menu import option_menu
from langchain.memory import ConversationBufferWindowMemory
from langchain_community.chat_message_histories import StreamlitChatMessageHistory

# qa_bot comes from app.py; get_agent_chain is assumed to live there as well,
# since tab1() calls it but the original import only brought in qa_bot.
from app import qa_bot, get_agent_chain

# Persist the full chat transcript in Streamlit session state and expose only the
# last k=3 exchanges to the LLM as conversational context.
memory_storage = StreamlitChatMessageHistory(key="chat_messages")
memory = ConversationBufferWindowMemory(memory_key="chat_history", human_prefix="User",
                                        chat_memory=memory_storage, k=3)

if 'error' not in st.session_state:
    st.session_state['error'] = []


def app_intro():
    return """

NaturalViz: Data Exploration and Visualization with NLP

Demo for Lablab.ai Mixtral hackathon

Welcome to NaturalViz! This app explores how Large Language Models (LLMs) can help you visualize data just by talking to it. We originally wrote NaturalViz to use OpenAI functions, but it has now been fully converted to use Mixtral-8x7B-Instruct.

Key Features:

Under the Hood: ⚙️

This app uses the Mixtral-8x7B-Instruct-v0.1 LLM to understand your questions and create visualizations.

Get Started:

Ask your data questions in plain language and let the magic happen! 🪄 The bot is here to help if you need it. Dataset used: 80 Cereals

""" def how_use_intro(): return """

Unlock Insights with NaturalViz! 🌐🔍


How to Use:

"""
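

# ---------------------------------------------------------------------------
# Illustrative sketch only (not part of the original app, never called): roughly
# the kind of chart the assistant is expected to produce from a plain-language
# request such as "plot sugar content against rating". The file name
# "cereal.csv", the column names, and the use of Plotly are assumptions based on
# the public "80 Cereals" dataset mentioned in app_intro(); the real
# chart-building logic lives in app.py.
def _example_cereal_chart():
    import pandas as pd
    import plotly.express as px

    df = pd.read_csv("cereal.csv")  # hypothetical local copy of the 80 Cereals data
    fig = px.scatter(df, x="sugars", y="rating", hover_name="name",
                     title="Sugar content vs. consumer rating")
    st.plotly_chart(fig, use_container_width=True)
# ---------------------------------------------------------------------------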
""" def tab2(): col1, col2 = st.columns([1, 2]) with col1: st.image("image.jpg", use_column_width=True) with col2: st.markdown(app_intro(), unsafe_allow_html=True) st.markdown(how_use_intro(),unsafe_allow_html=True) github_link = '[](https://github.com/ethanrom)' huggingface_link = '[](https://huggingface.co/ethanrom)' st.write(github_link + '   ' + huggingface_link, unsafe_allow_html=True) st.markdown("

Disclaimer: This app is a proof-of-concept and may not be suitable for real-world decisions. During the Hackthon period usage information are being recorded using Langsmith

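

# ---------------------------------------------------------------------------
# Illustrative sketch only (an assumption, not the actual app.py code, never
# called): one way get_agent_chain() could wire the Mixtral-8x7B-Instruct-v0.1
# model into a LangChain agent. The tools argument, model parameters, and the
# HuggingFaceEndpoint settings are placeholders.
def _example_agent_chain(tools):
    from langchain_community.llms import HuggingFaceEndpoint
    from langchain.agents import initialize_agent, AgentType

    llm = HuggingFaceEndpoint(
        repo_id="mistralai/Mixtral-8x7B-Instruct-v0.1",  # model named in app_intro()
        max_new_tokens=512,
        temperature=0.1,
    )
    # A conversational agent type (with memory=memory) would let the k=3 window
    # defined at the top of this file flow into the prompt; the zero-shot variant
    # matches the "zeroshot_agent_chain" name used in tab1().
    return initialize_agent(
        tools,
        llm,
        agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
        verbose=True,
        handle_parsing_errors=True,
    )
# ---------------------------------------------------------------------------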
", unsafe_allow_html=True) def tab1(): st.header("🗣️ Chat") for i, msg in enumerate(memory_storage.messages): name = "user" if i % 2 == 0 else "assistant" st.chat_message(name).markdown(msg.content) if user_input := st.chat_input("User Input"): with st.chat_message("user"): st.markdown(user_input) with st.spinner("Generating Response..."): with st.chat_message("assistant"): chain = qa_bot() zeroshot_agent_chain = get_agent_chain() response = zeroshot_agent_chain({"input": user_input}) answer = response['output'] st.markdown(answer) if st.sidebar.button("Clear Chat History"): memory_storage.clear() def main(): st.set_page_config(page_title="Virtual Tutor", page_icon=":memo:", layout="wide") #os.environ['LANGCHAIN_TRACING_V2'] = "true" #os.environ['LANGCHAIN_API_KEY'] == st.secrets['LANGCHAIN_API_KEY'] tabs = ["Chat","Audio"] with st.sidebar: current_tab = option_menu("Select a Tab", tabs, menu_icon="cast") tab_functions = { "Chat": tab1, "Audio": tab2, } if current_tab in tab_functions: tab_functions[current_tab]() if __name__ == "__main__": main()