import webbrowser

import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

# NOTE: from_pretrained expects a full Hugging Face repo id ("user/model-name");
# the value below looks truncated and should be replaced with the full model id.
MODEL_NAME = "reshinthadith"


@st.cache(allow_output_mutation=True)
def load_model_and_tokenizer(model_name):
    """Load the model and tokenizer once and keep them cached in memory."""
    model = AutoModelForCausalLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    return tokenizer, model


tokenizer, model = load_model_and_tokenizer(MODEL_NAME)

st.set_page_config(
    page_title="Code Representation Learning",
    initial_sidebar_state="expanded",
)
st.sidebar.title("Code Representation Learning")
workflow = st.sidebar.selectbox("Select a task", ["Bash Synthesis"])

if workflow == "Bash Synthesis":
    st.title("Program Synthesis for Bash")
    arxiv_id = st.text_input(
        "Natural language prompt", "list all the files in the directory 'data/'"
    )
    output_diction = {}
    button = st.button("Synthesize")
    if button:
        # The handler below opens the entered text as an arXiv link and shows
        # paper-metadata fields; output_diction is never populated in this
        # script, so .get() is used to avoid a KeyError.
        link_gen = r"https://arxiv.org/abs/"
        webbrowser.open_new_tab(link_gen + str(arxiv_id))
        # Abstract
        with st.beta_expander("Abstract"):
            st.write(output_diction.get("abstract", ""))
        with st.beta_expander("Influencing Citations"):
            st.write(output_diction.get("influential_citations", ""))
        with st.beta_expander("Citation Graph"):
            print("")  # placeholder
# "Rebuttal Analysis" is not among the selectbox options above, so this branch
# is currently unreachable.
elif workflow == "Rebuttal Analysis":
    st.title("Rebuttal Analysis")
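
# --- Hypothetical sketch (not part of the original script) -------------------
# The model and tokenizer loaded above are never actually called by the
# "Synthesize" button. The helper below is a minimal sketch of how the prompt
# could be turned into a bash command with the causal LM, assuming MODEL_NAME
# points at a text-to-bash generation model; the function name and decoding
# parameters are illustrative assumptions, not part of the original app.
def synthesize_bash(prompt: str, max_new_tokens: int = 64) -> str:
    # Encode the natural-language prompt and greedily decode a continuation.
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    # Strip special tokens and return the generated text.
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
# Example usage inside the button handler: st.code(synthesize_bash(arxiv_id))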