Nicholas Meisburger committed on
Commit
22ba2c4
β€’
1 Parent(s): c7d63a8

Update app and name

Browse files
Files changed (2) hide show
  1. README.md +1 -1
  2. app.py +14 -3
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- title: Thirdai Llm
3
  emoji: πŸ“–
4
  colorFrom: purple
5
  colorTo: red
 
1
  ---
2
+ title: BOLT2.5B
3
  emoji: πŸ“–
4
  colorFrom: purple
5
  colorTo: red
app.py CHANGED
@@ -9,9 +9,9 @@ model = bolt.GenerativeModel.load("./generative.model")
9
 
10
 
11
  def generate(prompt, beam_width, temperature):
12
- prompt = tokenizer.encode(prompt)
13
 
14
- stream = model.streaming_generation(
15
  input_tokens=prompt,
16
  prediction_chunk_size=2,
17
  max_predictions=80,
@@ -25,7 +25,7 @@ def generate(prompt, beam_width, temperature):
25
 
26
  with gr.Blocks() as demo:
27
  prompt = gr.Textbox(label="Prompt", autofocus=True)
28
- output = gr.TextArea(label="Output")
29
  beam_width = gr.Slider(minimum=1, maximum=10, step=1, value=3, label="Beam Width")
30
  temperature = gr.Slider(
31
  minimum=0,
@@ -42,6 +42,17 @@ with gr.Blocks() as demo:
42
 
43
  gr.ClearButton(components=[prompt, output])
44
 
 
 
 
 
 
 
 
 
 
 
 
45
  if __name__ == "__main__":
46
  demo.queue()
47
  demo.launch()
 
9
 
10
 
11
  def generate(prompt, beam_width, temperature):
12
+ prompt = tokenizer.encode(prompt.strip())
13
 
14
+ stream = model.streaming_generate(
15
  input_tokens=prompt,
16
  prediction_chunk_size=2,
17
  max_predictions=80,
 
25
 
26
  with gr.Blocks() as demo:
27
  prompt = gr.Textbox(label="Prompt", autofocus=True)
28
+ output = gr.TextArea(label="Output", lines=5)
29
  beam_width = gr.Slider(minimum=1, maximum=10, step=1, value=3, label="Beam Width")
30
  temperature = gr.Slider(
31
  minimum=0,
 
42
 
43
  gr.ClearButton(components=[prompt, output])
44
 
45
+ gr.Markdown(
46
+ value="""
47
+ # BOLT2.5B
48
+ BOLT2.5B is meticulously trained on CPUs, employing dynamic sparse technology, which lies at the core of our groundbreaking BOLT engine. A decade of dedicated research has culminated in BOLT, ensuring unparalleled efficiency for neural networks. The dynamic sparsity feature empowers us to selectively activate neural pathways, enabling optimal training even on CPU resources.
49
+
50
+ This release includes a 2.5 billion parameter model, along with both inference and training scripts tailored for distributed as well as single-machine training scenarios. For more information, visit this (link to anshu blog)
51
+
52
+ Note: This model is trained only on next-word prediction; no instruction fine-tuning was performed, and no instruction data was used in training.
53
+ """
54
+ )
55
+
56
  if __name__ == "__main__":
57
  demo.queue()
58
  demo.launch()