Nicholas Meisburger committed
Commit 10bed7b
1 Parent(s): 7ed493c

app and model

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+thirdai-0.7.18+a1506df-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -1,13 +1,13 @@
 ---
 title: Thirdai Llm
-emoji: πŸ†
+emoji: πŸ“–
 colorFrom: purple
 colorTo: red
 sdk: gradio
 sdk_version: 3.43.2
 app_file: app.py
 pinned: false
-license: apache-2.0
+license: other
 ---
 
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
app.py ADDED
@@ -0,0 +1,39 @@
+import gradio as gr
+from thirdai import bolt, licensing
+from transformers import GPT2Tokenizer
+
+licensing.activate("7511CC-0E24D7-69439D-5D6CBA-33AAFD-V3")
+
+tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
+model = bolt.GenerativeModel.load("./generative.model")
+
+
+def generate(prompt):
+    prompt = tokenizer.encode(prompt)
+
+    stream = model.streaming_generation(
+        input_tokens=prompt,
+        prediction_chunk_size=2,
+        max_predictions=100,
+        beam_width=10,
+    )
+
+    for res in stream:
+        yield tokenizer.decode(res)
+
+
+with gr.Blocks() as demo:
+    output = gr.TextArea(label="Output")
+    prompt = gr.Textbox(
+        label="Prompt",
+    )
+    prompt.submit(generate, inputs=[prompt], outputs=[output])
+
+    btn = gr.Button(value="Generate")
+    btn.click(generate, inputs=[prompt], outputs=[output])
+
+    gr.ClearButton(components=[prompt, output])
+
+if __name__ == "__main__":
+    demo.queue()
+    demo.launch()
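The app streams output rather than returning a single completion: generate() encodes the prompt with the GPT-2 tokenizer, asks the bolt generative model for a beam-searched stream of predictions, and yields decoded text on every step, which Gradio writes into the output box. Below is a minimal local sketch, not part of the commit, that drives the same generator outside the UI; it assumes the bundled thirdai wheel is installed, the license key in app.py is still active, and generative.model has been pulled from Git LFS into the working directory.

# Sketch only: call app.generate() directly, outside the Gradio UI.
# Assumptions: the thirdai wheel from this commit is installed, the license
# key in app.py still activates, and ./generative.model exists locally.
from app import generate  # importing app loads the tokenizer and the model

for partial in generate("Deep learning is"):
    # Each yielded string is the decoded prediction for that step; app.py
    # overwrites the Gradio TextArea with the same value on every yield.
    print(partial)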
generative.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9b4aab4f77a23d4bcd74ac8b585a7855f61ddf5dfeb3c5ba29fc55e0b3c68cb
+size 10526674514
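generative.model is committed as a Git LFS pointer; the actual weights are roughly 10.5 GB (10,526,674,514 bytes). A hedged sketch for pulling the file outside of a full git-lfs checkout is below, using huggingface_hub; the repo id shown is a placeholder for this Space's real id.

# Sketch only: fetch the LFS-backed model file into the local cache.
# "<owner>/thirdai-llm" is a placeholder; substitute the actual Space id.
from huggingface_hub import hf_hub_download

model_path = hf_hub_download(
    repo_id="<owner>/thirdai-llm",  # placeholder repo id
    filename="generative.model",
    repo_type="space",              # the file lives in a Space repo, not a model repo
)
print(model_path)  # local path to the ~10.5 GB download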
requirements.txt ADDED
@@ -0,0 +1,3 @@
+gradio
+transformers
+thirdai-0.7.18+a1506df-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
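requirements.txt leaves gradio and transformers unpinned and points at the wheel bundled in this commit, so pip should install thirdai 0.7.18+a1506df from the local file rather than from PyPI. A small sanity-check sketch for the resulting environment (importlib.metadata is standard library, so nothing beyond these three installs is assumed):

# Sketch only: confirm which versions the Space environment actually installed.
from importlib.metadata import version

print(version("thirdai"))       # expected 0.7.18+a1506df, from the bundled wheel
print(version("gradio"))        # unpinned in requirements.txt
print(version("transformers"))  # unpinned in requirements.txt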
thirdai-0.7.18+a1506df-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b9b69d5bd77b66db4f0cc62ff94ff2666c6d3996bb534ea782cf29b004d283b
+size 6137931