import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline


@st.cache(allow_output_mutation=True)
def load_model():
    # Load the GPT-2 rap-lyric checkpoint; the weights are stored in Flax
    # format, so from_flax=True converts them for the PyTorch pipeline.
    model_ckpt = "flax-community/gpt2-rap-lyric-generator"
    tokenizer = AutoTokenizer.from_pretrained(model_ckpt)
    model = AutoModelForCausalLM.from_pretrained(model_ckpt, from_flax=True)
    return tokenizer, model


@st.cache()
def load_rappers():
    # Read the list of artist names the model knows, one name per line.
    with open("rappers.txt") as text_file:
        rappers = [name.strip() for name in text_file.readlines()]
    rappers.sort()
    return rappers


title = st.title("Loading model")
tokenizer, model = load_model()
text_generation = pipeline("text-generation", model=model, tokenizer=tokenizer)
title.title("Rap lyrics generator")

# artist = st.text_input("Enter the artist", "Wu-Tang Clan")  # free-form alternative to the selectbox
list_of_rappers = load_rappers()
artist = st.selectbox(
    "Choose your rapper",
    tuple(list_of_rappers),
    index=len(list_of_rappers) - 1,
)

song_name = st.text_input("Enter the desired song name", "Sadboys")

if st.button("Generate lyrics", help="The lyrics generation can take up to 2 minutes"):
    st.title(f"{artist}: {song_name}")
    # Prompt the model with the song title followed by the first verse header.
    prefix_text = f"{song_name} [Verse 1:{artist}]"
    generated_song = text_generation(prefix_text, max_length=750, do_sample=True)[0]

    # NOTE: the checkpoint's special tokens are assumed to be <BOS> and <EOS>
    # here (five characters each, matching the line[5:] slice below).
    for count, line in enumerate(generated_song["generated_text"].split("\n")):
        if "<EOS>" in line:
            # End-of-song token: stop rendering.
            break
        if count == 0:
            # The first line echoes the prompt; keep only the verse header.
            st.markdown(f"**{line[line.find('['):]}**")
            continue
        if "<BOS>" in line:
            # Strip the beginning-of-song token before printing the line.
            st.write(line[5:])
            continue
        if line.startswith("["):
            # Section headers such as [Chorus] are rendered in bold.
            st.markdown(f"**{line}**")
            continue
        st.write(line)