lewtun (HF staff) committed on
Commit 2184a6f • 1 Parent(s): c2e5d86

Enable queue

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -4,7 +4,7 @@ from threading import Thread
 import gradio as gr
 import torch
 from transformers import (AutoModelForCausalLM, AutoTokenizer,
-                          GenerationConfig, TextIteratorStreamer)
+                          TextIteratorStreamer)

 theme = gr.themes.Monochrome(
     primary_hue="indigo",
@@ -82,7 +82,7 @@ with gr.Blocks(theme=theme) as demo:
     gr.Markdown(
         """<h1><center>🦙🦙🦙 StackLLaMa 🦙🦙🦙</center></h1>

-    StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Overflow](https://stackoverflow.com) using Reinforcement Learning from Human Feedback (RLHF) with the [TRL library](https://github.com/lvwerra/trl). For more details, check out our blog post [ADD LINK].
+    StackLLaMa is a 7 billion parameter language model that has been trained on pairs of programming questions and answers from [Stack Overflow](https://stackoverflow.com) using Reinforcement Learning from Human Feedback with the [TRL library](https://github.com/lvwerra/trl). For more details, check out our blog post [ADD LINK].

     Type in the box below and click the button to generate answers to your most pressing coding questions 🔥!
     """
@@ -149,5 +149,5 @@ with gr.Blocks(theme=theme) as demo:
     submit.click(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])
     instruction.submit(generate, inputs=[instruction, temperature, max_new_tokens, top_p, top_k], outputs=[output])

-demo.queue()
-demo.launch()
+demo.queue(concurrency_count=1)
+demo.launch(enable_queue=True)
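
Queuing matters here because the app streams partial answers through a TextIteratorStreamer, and Gradio 3.x only serves generator-style (streaming) handlers when the queue is enabled; concurrency_count=1 additionally serializes requests so the single model is never asked to generate for two users at once. Below is a minimal sketch of the same pattern, assuming Gradio 3.x (where Blocks.queue() takes concurrency_count and launch() still accepts the older enable_queue flag) and using a hypothetical slow_echo generator in place of the app's real generate:

# Minimal sketch of the queuing pattern this commit enables.
# Assumptions: Gradio 3.x; slow_echo is a hypothetical stand-in
# for the real streaming generate() in app.py.
import time

import gradio as gr


def slow_echo(message):
    # Yield growing partial output, the way a TextIteratorStreamer-backed
    # generate() streams tokens back to the UI.
    partial = ""
    for word in message.split():
        partial += word + " "
        time.sleep(0.3)
        yield partial


with gr.Blocks() as demo:
    instruction = gr.Textbox(label="Instruction")
    output = gr.Textbox(label="Answer")
    instruction.submit(slow_echo, inputs=[instruction], outputs=[output])

# Generator handlers only stream through the queue; concurrency_count=1
# processes one request at a time.
demo.queue(concurrency_count=1)
demo.launch(enable_queue=True)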