Kvikontent committed
Commit 5fed86e
Parent(s): 035d24b
Create app.py
app.py ADDED
@@ -0,0 +1,18 @@
+from transformers import AutoTokenizer, AutoModelForCausalLM
+import torch
+import gradio as gr
+
+# Load pre-trained tokenizer and model
+tokenizer = AutoTokenizer.from_pretrained('huggingartists/ed-sheeran')
+model = AutoModelForCausalLM.from_pretrained('huggingartists/ed-sheeran', pad_token_id=50269)
+
+# Function to generate predictions
+def ed_lyrics(prompt):
+    encoded_prompt = tokenizer.encode(prompt + "\n\nLyrics: ", add_special_tokens=False, return_tensors='pt').to('cpu')
+    output_sequences = model.generate(encoded_prompt, max_length=75 + encoded_prompt.shape[1], top_p=0.8, do_sample=True)[0].tolist()
+    generated_song = tokenizer.decode(output_sequences, clean_up_tokenization_spaces=True)
+    final_result = generated_song.split("\n\n")[-1]
+    return final_result
+
+# Launch interactive web demo
+iface = gr.Interface(fn=ed_lyrics, inputs="textbox", outputs="text").launch()
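For reference, a minimal local smoke test of the same generation path (a sketch, not part of this commit; it assumes the huggingartists/ed-sheeran weights download successfully and simply mirrors the calls made in app.py above, without the Gradio interface):

# Illustrative smoke test mirroring app.py's generation step (assumption: run locally, outside the Space).
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained('huggingartists/ed-sheeran')
model = AutoModelForCausalLM.from_pretrained('huggingartists/ed-sheeran', pad_token_id=50269)

# Encode a sample prompt the same way the app does, then sample up to 75 new tokens.
prompt_ids = tokenizer.encode("Castle on the hill\n\nLyrics: ", add_special_tokens=False, return_tensors='pt')
output = model.generate(prompt_ids, max_length=75 + prompt_ids.shape[1], top_p=0.8, do_sample=True)[0]
print(tokenizer.decode(output, clean_up_tokenization_spaces=True))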