reach-vb HF staff committed on
Commit
637cb4e
β€’
1 Parent(s): f7860cf
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -140,7 +140,7 @@ Vote to find the best Text-to-Speech model out there!
140
  INSTR = """
141
  ## Instructions
142
 
143
- * Input the text to synthesise audio (or press 🎲 for a random text).
144
  * Listen to the two audio clips, one after the other.
145
  * Vote on which audio sounds more natural to you.
146
  * Model names are revealed after the vote is cast.
@@ -456,7 +456,7 @@ with gr.Blocks() as leaderboard:
456
  reveal_prelim.input(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
457
  leaderboard.load(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
458
  reloadbtn.click(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
459
- gr.Markdown("DISCLAIMER: The licenses listed may not be accurate or up to date, you are responsible for checking the licenses before using the models. Also note that some models may have additional usage restrictions.")
460
 
461
  # with gr.Blocks() as vote:
462
  # useridstate = gr.State()
@@ -518,7 +518,7 @@ def synthandreturn(text):
518
  if len(text) > MAX_SAMPLE_TXT_LENGTH:
519
  raise gr.Error(f'You exceeded the limit of {MAX_SAMPLE_TXT_LENGTH} characters')
520
  if len(text) < MIN_SAMPLE_TXT_LENGTH:
521
- raise gr.Error(f'Not enough text')
522
  if (toxicity.predict(text)['toxicity'] > 0.5):
523
  print(f'Detected toxic content! "{text}"')
524
  raise gr.Error('Your text failed the toxicity test')
 
140
  INSTR = """
141
  ## Instructions
142
 
143
+ * Input the text (English only) to synthesise audio (or press 🎲 for random text).
144
  * Listen to the two audio clips, one after the other.
145
  * Vote on which audio sounds more natural to you.
146
  * Model names are revealed after the vote is cast.
 
456
  reveal_prelim.input(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
457
  leaderboard.load(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
458
  reloadbtn.click(get_leaderboard, inputs=[reveal_prelim], outputs=[df])
459
+ # gr.Markdown("DISCLAIMER: The licenses listed may not be accurate or up to date, you are responsible for checking the licenses before using the models. Also note that some models may have additional usage restrictions.")
460
 
461
  # with gr.Blocks() as vote:
462
  # useridstate = gr.State()
 
518
  if len(text) > MAX_SAMPLE_TXT_LENGTH:
519
  raise gr.Error(f'You exceeded the limit of {MAX_SAMPLE_TXT_LENGTH} characters')
520
  if len(text) < MIN_SAMPLE_TXT_LENGTH:
521
+ raise gr.Error(f'Please input a text longer than {MIN_SAMPLE_TXT_LENGTH} characters')
522
  if (toxicity.predict(text)['toxicity'] > 0.5):
523
  print(f'Detected toxic content! "{text}"')
524
  raise gr.Error('Your text failed the toxicity test')