wvangils committed
Commit 4055606
1 Parent(s): e6d0718

New version of App


Add the ability for users to select a language model for text generation. The choice is now offered as a radio button with three fine-tuned models.
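For reference, below is a minimal, self-contained sketch of the pattern this commit introduces. The checkpoint names, widget labels and default values are taken from the diff further down; anything the diff does not show (do_sample, the return value, the final launch call) is an assumption, not the verbatim app.py.

# Minimal sketch of the new app structure; assumes transformers and gradio are installed.
from transformers import pipeline
import gradio as gr

# Fine-tuned checkpoints offered to the user (names taken from the diff)
checkpoint_choices = ['wvangils/GPT-Medium-Beatles-Lyrics-finetuned-newlyrics',
                      'wvangils/GPT-Neo-125m-Beatles-Lyrics-finetuned-newlyrics',
                      'wvangils/BLOOM-350m-Beatles-Lyrics-finetuned-newlyrics']

def generate_beatles(checkpoint, input_prompt, temperature, top_p):
    # Build a generator for whichever checkpoint the user picked (per call, as in the commit)
    generator = pipeline("text-generation", model=checkpoint)
    generated_lyrics = generator(input_prompt,
                                 max_length=100,
                                 num_return_sequences=1,
                                 do_sample=True,           # assumed, so temperature/top_p take effect
                                 temperature=temperature,
                                 top_p=top_p)
    return generated_lyrics[0]['generated_text']  # assumed return value; the hunk ends before this line

# Inputs and output, mirroring the widgets in the diff
checkpoint = gr.Radio(checkpoint_choices, value=checkpoint_choices[0], label='Select fine-tuned model')
input_box = gr.Textbox(label="Input prompt:", value="In my dreams I am", lines=2, max_lines=5)
temperature = gr.Slider(minimum=0.1, maximum=1.0, step=0.1, value=0.7, label="Temperature")
top_p = gr.Slider(minimum=0.1, maximum=1.0, step=0.1, value=0.5, label="Top-p")
output_box = gr.Textbox(label="Lyrics by The Beatles and chosen language model:", lines=25)

gr.Interface(fn=generate_beatles,
             inputs=[checkpoint, input_box, temperature, top_p],
             outputs=output_box).launch()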

Files changed (1)
  1. app.py +13 -23
app.py CHANGED
@@ -2,18 +2,13 @@ import transformers
 from transformers import pipeline
 import gradio as gr
 
-# Available models for pipeline
-# checkpoint = 'wvangils/CTRL-Beatles-Lyrics-finetuned-newlyrics'
-checkpoint = 'wvangils/GPT-Medium-Beatles-Lyrics-finetuned-newlyrics'
-#checkpoint = 'wvangils/GPT-Neo-125m-Beatles-Lyrics-finetuned-newlyrics'
-# checkpoint = 'wvangils/GPT2-Beatles-Lyrics-finetuned-newlyrics'
-# checkpoint = 'wvangils/DistilGPT2-Beatles-Lyrics-finetuned-newlyrics'
-
-# Create generator
-generator = pipeline("text-generation", model=checkpoint)
+checkpoint_choices = ['wvangils/GPT-Medium-Beatles-Lyrics-finetuned-newlyrics', 'wvangils/GPT-Neo-125m-Beatles-Lyrics-finetuned-newlyrics', 'wvangils/BLOOM-350m-Beatles-Lyrics-finetuned-newlyrics']
 
 # Create function for generation
-def generate_beatles(input_prompt, temperature, top_p):
+def generate_beatles(checkpoint, input_prompt, temperature, top_p):
+  # Create generator for different models
+  generator = pipeline("text-generation", model=checkpoint)
+
   generated_lyrics = generator(input_prompt
                              , max_length = 100
                              , num_return_sequences = 1
@@ -32,30 +27,25 @@ def generate_beatles(input_prompt, temperature, top_p):
 
 # Create textboxes for input and output
 input_box = gr.Textbox(label="Input prompt:", placeholder="Write the start of a song here", value="In my dreams I am", lines=2, max_lines=5)
-output_box = gr.Textbox(label="Lyrics by The Beatles and GPT:", lines=25)
-
-# Specify examples
-examples = [['In my dreams I am', 0.7], ['I don\'t feel alive when', 0.7]]
+output_box = gr.Textbox(label="Lyrics by The Beatles and chosen language model:", lines=25)
 
 # Layout and text above the App
-title='Beatles lyrics generator based on GPT2'
-description="<p style='text-align: center'>A medium class GPT2 model was fine-tuned on lyrics from The Beatles to generate Beatles-like text. Give it a try!</p>"
-article="""<p style='text-align: left'>A couple of data scientists working for <a href='https://cmotions.nl/' targeet="_blank">Cmotions</a> came together to construct a Language Generation model that will ouput Beatles-like text.
-We used several models that were able to load in Colab and choose <a href='https://huggingface.co/gpt2-medium' target='_blank'>GPT2-medium</a> as the winner. Further we've put together a <a href='https://huggingface.co/datasets/cmotions/Beatles_lyrics' target='_blank'> Huggingface dataset</a> containing all known lyrics created by
-The Beatles. <a href='https://www.theanalyticslab.nl/blogs/' target='_blank'>Read this blog </a> to see how this model was build in a Python the notebook using Huggingface.
+title='Beatles lyrics generator'
+description="<p style='text-align: center'>Multiple language models were fine-tuned on lyrics from The Beatles to generate Beatles-like text. Give it a try!</p>"
+article="""<p style='text-align: left'>A couple of data scientists working for <a href='https://cmotions.nl/' target="_blank">Cmotions</a> came together to construct a text generation model that will output Beatles-like text.
+We tried several text generation models that we were able to load in Colab: a general <a href='https://huggingface.co/gpt2-medium' target='_blank'>GPT2-medium</a> model, the EleutherAI small-sized GPT model <a href='https://huggingface.co/EleutherAI/gpt-neo-125M' target='_blank'>GPT-Neo</a> and the new kid on the block built by the <a href='https://bigscience.notion.site/BLOOM-BigScience-176B-Model-ad073ca07cdf479398d5f95d88e218c4' target='_blank'>Bigscience</a> initiative, <a href='https://huggingface.co/bigscience/bloom-350m' target='_blank'>BLOOM 350m</a>.
+Further we've put together a <a href='https://huggingface.co/datasets/cmotions/Beatles_lyrics' target='_blank'>Huggingface dataset</a> containing all known lyrics created by The Beatles. <a href='https://www.theanalyticslab.nl/blogs/' target='_blank'>Read this blog</a> to see how this model was built in a Python notebook using Huggingface.
 The default output contains 100 tokens and has a repetition penalty of 1.0.
 </p>"""
 
 # Let users select their own temperature and top-p
 temperature = gr.Slider(minimum=0.1, maximum=1.0, step=0.1, label="Temperature (high = sensitive for low probability tokens)", value=0.7, show_label=True)
 top_p = gr.Slider(minimum=0.1, maximum=1.0, step=0.1, label="Top-p (sample next possible words from given probability p)", value=0.5, show_label=True)
-
-# Can I put examples in an input dropdown box?
-#examples_dropdown = gr.Dropdown(choises=examples, value = 'In my dreams I am', label='Examples', show_label=True)
+checkpoint = gr.Radio(checkpoint_choices, value='wvangils/GPT-Medium-Beatles-Lyrics-finetuned-newlyrics', interactive=True, label='Select fine-tuned model', show_label=True)
 
 # Use generate Beatles function in demo-app Gradio
 gr.Interface(fn=generate_beatles
-           , inputs=[input_box, temperature, top_p]
+           , inputs=[checkpoint, input_box, temperature, top_p]
            , outputs=output_box
            #, examples=examples # output is not very fancy as you have to specify all inputs for every example
           , title=title
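Because the commit builds the pipeline inside generate_beatles, the selected checkpoint is reloaded on every request, which keeps memory use low but makes each generation slower. A hypothetical refinement, not part of this commit, would cache one generator per checkpoint so switching models via the radio button does not reload weights each time:

# Hypothetical variant (not in this commit): cache one generator per checkpoint.
from functools import lru_cache
from transformers import pipeline

@lru_cache(maxsize=3)
def get_generator(checkpoint):
    # Loaded once per checkpoint, then reused across later calls
    return pipeline("text-generation", model=checkpoint)

def generate_beatles(checkpoint, input_prompt, temperature, top_p):
    generator = get_generator(checkpoint)
    generated_lyrics = generator(input_prompt,
                                 max_length=100,
                                 num_return_sequences=1,
                                 do_sample=True,           # assumed, as in the sketch above
                                 temperature=temperature,
                                 top_p=top_p)
    return generated_lyrics[0]['generated_text']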