import streamlit as st
import json
import os
import requests
from bardapi import Bard

# Load the GOOGLE_LANGUAGES_TO_CODES dictionary from lang.json
with open("lang.json", "r") as file:
    GOOGLE_LANGUAGES_TO_CODES = json.load(file)
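# Note: lang.json is not shown here; it is assumed to map language names to
# Google language codes (e.g. {"english": "en", ...}), since its keys are used
# as names below and its values are passed to Bard(language=...).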

# Language selection (hardcoded to English here; the dictionary's keys could
# also drive a sidebar selector)
selected_language_name = "english"
code_interpreter = True
system_prompt = "Rule 1: If a user provides a code explain it line by line"
useSystemPrompt = True
exportToReplIt = False
showImages = True

# Retrieve the corresponding language code from the dictionary
selected_language_code = GOOGLE_LANGUAGES_TO_CODES[selected_language_name]

# Initialize Bard with the selected language code
bard = Bard(token=os.getenv("_BARD_API_KEY"), language=selected_language_code)
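# Note: for the bardapi package, `token` is typically the value of the Google
# __Secure-1PSID cookie; it is assumed here to be supplied through the
# _BARD_API_KEY environment variable (e.g. as a Space secret).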

TITLE = "🦜 Ayush's Codebot"
DESCRIPTION = """
Welcome to my coding chatbot based on Google's PaLM 2! Paste any code you want explained!
"""

# Streamlit UI
st.title(TITLE)
st.write(DESCRIPTION)

# Prediction function
def predict(message):
    with st.status("Requesting 🦜..."):
        st.write("Requesting API...")
        response = bard.get_answer(
            message if not (code_interpreter and useSystemPrompt)
            else message + " . " + system_prompt
        )
        st.write("Done...")
        st.write("Checking images...")
        if "images" in response and showImages:
            for image in response["images"]:
                st.image(image)
    return response
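# Note: bardapi's get_answer() is assumed to return a dict that includes
# 'content' (the answer text) and, when available, 'images' and 'code';
# predict() above and the chat handling below rely on that shape.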

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"], avatar=("🧑‍💻" if message["role"] == "human" else "🦜")):
        st.markdown(message["content"])

# React to user input
if prompt := st.chat_input("Ask 🦜 anything..."):
    st.chat_message("human", avatar="🧑‍💻").markdown(prompt)
    st.session_state.messages.append({"role": "human", "content": prompt})
    response = predict(prompt)
    with st.chat_message("assistant", avatar="🦜"):
        st.markdown(response["content"])
        # Run any code block Bard returned; exec() executes model-generated
        # code in the app process, so failures are caught and swallowed here.
        if response.get("code"):
            try:
                exec(response["code"])
            except Exception:
                st.write("End of explanation...")
    st.session_state.messages.append({"role": "assistant", "content": response["content"]})