import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline, set_seed
#===========================================#
# Loads Model and Pipeline #
#===========================================#
tokenizer = AutoTokenizer.from_pretrained("flax-community/swe-gpt-wiki")
model = AutoModelForCausalLM.from_pretrained("flax-community/swe-gpt-wiki")
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
set_seed(42)
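# A minimal sanity check of the pipeline output, left commented out so it does not run on
# every Streamlit rerun. The prompt "Stockholm är" is an illustrative assumption, not part
# of the app; it only shows that the pipeline returns a list of {"generated_text": ...} dicts.
#   sample = generator("Stockholm är", max_length=20, num_return_sequences=1)
#   print(sample[0]["generated_text"])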
#===========================================#
# Streamlit Code #
#===========================================#
desc = "En svensk GPT-modell tränad på wikipedia"  # "A Swedish GPT model trained on Wikipedia"
st.title('Vi använder Wikipedia för att generera text')  # "We use Wikipedia to generate text"
st.write(desc)
num_sentences = st.number_input('Number of Sentences', min_value=1, max_value=20, value=5)
user_input = st.text_input('Seed Text (can leave blank)')
if st.button('Generate Text'):
    # Note: max_length is measured in tokens, not sentences, so the number entered above
    # caps the length of the generated text in tokens.
    generated_text = generator(user_input, max_length=num_sentences, num_return_sequences=1)
    # The pipeline returns a list of dicts, e.g. [{"generated_text": "..."}]
    st.write(generated_text[0]["generated_text"])
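# A sketch of how to run the app locally, assuming this script is saved as app.py
# (the filename and the install step are assumptions, not stated in the file):
#   pip install streamlit transformers torch
#   streamlit run app.py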