Spaces: Runtime error
import streamlit as st
import time
from transformers import pipeline
import torch

st.markdown('## Text-generation OPT from Facebook')

def get_model(model_name):
    # Build a text-generation pipeline for the selected OPT checkpoint.
    # skip_special_tokens is not a valid pipeline/generate argument (the
    # pipeline already strips special tokens when decoding), so it is dropped.
    return pipeline('text-generation', model=model_name, do_sample=True)

col1, col2 = st.columns([2, 1])

with st.sidebar:
    st.markdown('## Model Parameters')
    # Lower bound > 0: generate() rejects max_length == 0.
    max_length = st.slider('Max text length', 10, 150, 80)
    num_beams = st.slider('N° of beams (beam search)', 2, 15, 5)
    # Offer booleans directly instead of the strings 'True'/'False';
    # key= is a widget identifier in Streamlit, not a value mapping.
    early_stopping = st.selectbox(
        'Early stopping text generation',
        (True, False), index=0)
    no_ngram_repeat = st.slider('No-repeat n-gram size', 1, 5, 2)

with col1:
    prompt = st.text_area('Your prompt here',
                          '''Who is Elon Musk?''')

with col2:
    select_model = st.radio(
        "Select the model to use:",
        ('OPT-125m', 'OPT-350m', 'OPT-1.3b'), index=1)

# Map the radio choice to the corresponding Hugging Face model id.
if select_model == 'OPT-1.3b':
    model = 'facebook/opt-1.3b'
elif select_model == 'OPT-350m':
    model = 'facebook/opt-350m'
elif select_model == 'OPT-125m':
    model = 'facebook/opt-125m'

with st.spinner('Loading Model... (This may take a while)'):
    generator = get_model(model)
st.success('Model loaded correctly!')

gen = st.info('Generating text...')
answer = generator(prompt,
                   max_length=max_length,
                   no_repeat_ngram_size=no_ngram_repeat,
                   early_stopping=early_stopping,
                   num_beams=num_beams)
gen.empty()

# "Typewriter" effect: re-render the generated text one character at a time.
lst = answer[0]['generated_text']
t = st.empty()
for i in range(1, len(lst) + 1):
    t.markdown("#### %s" % lst[:i])
    time.sleep(0.04)
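
Note that, as written, the pipeline is rebuilt (and the OPT weights reloaded) on every widget interaction, because Streamlit reruns the whole script on each change. A minimal sketch of a cached loader is below; it assumes a recent Streamlit release that provides st.cache_resource (older versions use st.cache instead), and load_generator is just an illustrative name.

import streamlit as st
from transformers import pipeline

# Keep one pipeline instance per model id across reruns instead of
# reloading the checkpoint every time a widget changes.
@st.cache_resource
def load_generator(model_name: str):
    return pipeline('text-generation', model=model_name, do_sample=True)

generator = load_generator('facebook/opt-125m')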