ysharma (HF staff) committed
Commit ac563f7
1 Parent(s): a7e2869
Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -65,7 +65,7 @@ demo = gr.Blocks()
 with demo:
     gr.Markdown("<h1><center>Step By Step With Bloom</center></h1>")
     gr.Markdown(
-        """ [BigScienceW Bloom](https://twitter.com/BigscienceW) \n\n Large language models have demonstrated a capability of 'Chain-of-thought reasoning'. Some amazing researchers( [Jason Wei et al.](https://arxiv.org/abs/2205.11916)) recently found out that by addding **Lets think step by step** it improves the model's zero-shot performance. Some might say — You can get good results out of LLMs if you know how to speak to them.\n\nThis Space is created by [Yuvraj Sharma](https://twitter.com/yvrjsharma) for EuroPython 2022 Demo."""
+        """ [BigScienceW Bloom](https://twitter.com/BigscienceW) \n\n Large language models have demonstrated a capability of 'Chain-of-thought reasoning'. A group of amazing researchers( [Jason Wei et al.](https://arxiv.org/abs/2205.11916)) recently found out that by adding **Lets think step by step** it improves the model's zero-shot performance. Some might say — You can get good results out of LLMs if you know how to speak to them. This space is an attempt at inspecting this LLM behavior/capability in the new HuggingFace BigScienceW [Bloom](https://huggingface.co/bigscience/bloom) model. \n\nThis Space is created by [Yuvraj Sharma](https://twitter.com/yvrjsharma) for EuroPython 2022 Demo.\nThis Space might sometime fail due to inference queue being full and logs would end up showing error as *queue full, try again later*, don't despair and try again after some time. I would try and improve the app as well over next couple days."""
         )
     with gr.Row():
 
@@ -76,7 +76,7 @@ with demo:
     example_template = gr.Radio( ["Let’s think step by step."," First, ", " Let’s think about this logically.", "Let’s solve this problem by splitting it into steps.", " Let’s be realistic and think step by step.", "Let’s think like a detective step by step.", "Let’s think", "Before we dive into the answer,", "The answer is after the proof."], label= "Choose a sample Template for Zero-Shot CoT")
 
     #input_word = gr.Textbox(placeholder="Enter a word here to generate text ...")
-    generated_txt = gr.Textbox(lines=7)
+    generated_txt = gr.Textbox(lines=10)
 
 
     b1 = gr.Button("Generate Text")
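
For orientation, the two hunks above only touch the intro Markdown and the height of the output Textbox (7 lines to 10). Below is a minimal sketch of how a Blocks app of this shape could wire the chosen Zero-Shot CoT template into generation; the prompt Textbox and the `generate` helper are illustrative assumptions, and the actual Bloom inference call made by the Space is not shown in the diff, so it is stubbed out.

```python
import gradio as gr

def generate(prompt, template):
    # Hypothetical helper (not part of the shown hunks): append the chosen
    # Zero-Shot CoT template to the user's prompt. The real Space sends this
    # to the Bloom inference endpoint; that call is stubbed out here.
    full_prompt = f"{prompt}\n{template}"
    return f"(model output for) {full_prompt}"

demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Step By Step With Bloom</center></h1>")
    with gr.Row():
        # Assumed input field; the diff only shows the template Radio and output box.
        input_prompt = gr.Textbox(label="Your prompt")
        example_template = gr.Radio(
            ["Let’s think step by step.", " First, ", " Let’s think about this logically."],
            label="Choose a sample Template for Zero-Shot CoT",
        )
    generated_txt = gr.Textbox(lines=10)  # widened from 7 to 10 lines by this commit
    b1 = gr.Button("Generate Text")
    b1.click(generate, inputs=[input_prompt, example_template], outputs=generated_txt)

demo.launch()
```

The Radio choices are truncated to three templates here for brevity; the commit itself changes only the intro Markdown string and `generated_txt = gr.Textbox(lines=10)`.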