lewtun committed
Commit 30dcff9 • 1 Parent(s): 76e883b

Fix examples

Files changed (1)
  1. app.py +7 -5
app.py CHANGED
@@ -32,7 +32,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_id, use_auth_token=HF_TOKEN)
 PROMPT_TEMPLATE = """Question: {prompt}\n\nAnswer:"""
 
 
-def generate(instruction, temperature=0.7, max_new_tokens=256, top_p=0.95, top_k=40):
+def generate(instruction, temperature=0.8, max_new_tokens=128, top_p=0.95, top_k=40):
     formatted_instruction = PROMPT_TEMPLATE.format(prompt=instruction)
 
     temperature = float(temperature)
@@ -69,8 +69,10 @@ def generate(instruction, temperature=0.7, max_new_tokens=256, top_p=0.95, top_k
 
 examples = [
     "How do I create an array in C++ of length 5 which contains all even numbers between 1 and 10?",
-    "How can I write a Java function to generate the nth Fibonacci number?",
     "How can I sort a list in Python?",
+    "How many helicopters can a human eat in one sitting?",
+    "How can I write a Java function to generate the nth Fibonacci number?",
+    "There's a lion in my garden. How can I get rid of it?"
 ]
 
 
@@ -85,7 +87,7 @@ with gr.Blocks(theme=theme, analytics_enabled=False) as demo:
     gr.Markdown(
         """<h1><center>🦙🦙🦙 StackLLaMa 🦙🦙🦙</center></h1>
 
-        StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Overflow](https://stackoverflow.com) using Reinforcement Learning from Human Feedback with the [TRL library](https://github.com/lvwerra/trl). For more details, check out our blog post [ADD LINK].
+        StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Exchange](https://stackexchange.com) using Reinforcement Learning from Human Feedback with the [TRL library](https://github.com/lvwerra/trl). For more details, check out our [blog post](https://huggingface.co/blog/stackllama).
 
         Type in the box below and click the button to generate answers to your most pressing coding questions 🔥!
         """
@@ -114,7 +116,7 @@ with gr.Blocks(theme=theme, analytics_enabled=False) as demo:
     with gr.Column(scale=1):
         temperature = gr.Slider(
             label="Temperature",
-            value=0.7,
+            value=0.8,
            minimum=0.0,
            maximum=2.0,
            step=0.1,
@@ -153,4 +155,4 @@ with gr.Blocks(theme=theme, analytics_enabled=False) as demo:
     instruction.submit(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
 
 demo.queue(concurrency_count=1)
-demo.launch(enable_queue=True)
+demo.launch(enable_queue=True, share=True)
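For reference, a minimal sketch of how the pieces touched by this commit fit together in a Gradio 3.x Blocks app: the new `generate()` defaults (temperature 0.8, max_new_tokens 128), the expanded `examples` list, the Temperature slider kept in sync at 0.8, and `queue()`/`launch(share=True)`. This is an illustration under stated assumptions, not the full app.py: the model call is stubbed with a placeholder, and the labels and slider ranges other than Temperature are assumed rather than copied from the file.

```python
# Minimal sketch of the wiring this commit changes; not the full StackLLaMa app.
import gradio as gr

PROMPT_TEMPLATE = """Question: {prompt}\n\nAnswer:"""


def generate(instruction, temperature=0.8, max_new_tokens=128, top_p=0.95, top_k=40):
    # New defaults from this commit: temperature 0.8, max_new_tokens 128.
    formatted_instruction = PROMPT_TEMPLATE.format(prompt=instruction)
    temperature = float(temperature)
    # Placeholder for the actual tokenizer/model generation in app.py.
    return f"(model output for: {formatted_instruction[:60]}...)"


examples = [
    "How do I create an array in C++ of length 5 which contains all even numbers between 1 and 10?",
    "How can I sort a list in Python?",
    "How many helicopters can a human eat in one sitting?",
    "How can I write a Java function to generate the nth Fibonacci number?",
    "There's a lion in my garden. How can I get rid of it?",
]

with gr.Blocks(analytics_enabled=False) as demo:
    instruction = gr.Textbox(label="Question")
    output = gr.Textbox(label="Answer")
    # Slider default kept in sync with the generate() signature (0.8).
    temperature = gr.Slider(label="Temperature", value=0.8, minimum=0.0, maximum=2.0, step=0.1)
    # Ranges below are assumed for illustration only.
    max_new_tokens = gr.Slider(label="Max new tokens", value=128, minimum=1, maximum=512, step=1)
    top_p = gr.Slider(label="Top-p", value=0.95, minimum=0.0, maximum=1.0, step=0.01)
    top_k = gr.Slider(label="Top-k", value=40, minimum=1, maximum=100, step=1)
    gr.Examples(examples=examples, inputs=[instruction])
    instruction.submit(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])

demo.queue(concurrency_count=1)
# share=True (added in this commit) asks Gradio to create a public share link.
demo.launch(enable_queue=True, share=True)
```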