artek0chumak committed on
Commit
23f3344
1 Parent(s): 1d23779
Files changed (3)
  1. .gitmodules +3 -0
  2. app.py +26 -2
  3. petals +1 -0
.gitmodules ADDED
@@ -0,0 +1,3 @@
+[submodule "petals"]
+	path = petals
+	url = https://github.com/bigscience-workshop/petals
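The new .gitmodules entry vendors the Petals client library inside the Space. After a plain clone the petals/ directory stays empty until the submodule is checked out, e.g. with git clone --recurse-submodules or git submodule update --init; app.py below then reaches the vendored sources through sys.path.insert(0, './petals/').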
app.py CHANGED
@@ -1,4 +1,28 @@
+import sys
+sys.path.insert(0, './petals/')
+
+import transformers
 import streamlit as st
 
-x = st.slider('Select a value')
-st.write(x, 'squared is', x * x)
+# Import a Petals model
+from src.client.remote_model import DistributedBloomForCausalLM
+
+
+MODEL_NAME = "bigscience/test-bloomd-6b3"  # select the model you like
+INITIAL_PEERS = ["/ip4/193.106.95.184/tcp/31000/p2p/QmSg7izCDtowVTACbUmWvEiQZNY4wgCQ9T9Doo66K59X6q"]
+
+tokenizer = transformers.BloomTokenizerFast.from_pretrained(MODEL_NAME)
+model = DistributedBloomForCausalLM.from_pretrained(
+    MODEL_NAME,
+    initial_peers=INITIAL_PEERS,
+).to("cpu")
+
+text = st.text_input('Enter some text')
+max_new_tokens = st.slider('Select a value', min_value=1, max_value=100)
+
+if text:
+    # the model and tokenizer loaded above are reused here
+    input_ids = tokenizer([text], return_tensors="pt").input_ids
+    output = model.generate(input_ids, max_new_tokens=max_new_tokens)
+    output_text = tokenizer.batch_decode(output)
+    st.write(output_text)
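Taken together, the rewritten app.py is a thin Streamlit front end over a Petals client: the tokenizer runs locally through transformers, while DistributedBloomForCausalLM.from_pretrained builds a BLOOM model whose transformer blocks are executed by the swarm reachable via INITIAL_PEERS. Below is a minimal command-line sketch of the same flow, reusing the identifiers from this commit; the hard-coded bootstrap peer comes straight from the diff and may no longer be online.

# Sketch: the generation flow from app.py without the Streamlit UI.
# Assumes it runs next to the checked-out petals/ submodule and that the
# bootstrap peer listed in this commit is still reachable.
import sys
sys.path.insert(0, './petals/')

import transformers
from src.client.remote_model import DistributedBloomForCausalLM

MODEL_NAME = "bigscience/test-bloomd-6b3"
INITIAL_PEERS = ["/ip4/193.106.95.184/tcp/31000/p2p/QmSg7izCDtowVTACbUmWvEiQZNY4wgCQ9T9Doo66K59X6q"]

tokenizer = transformers.BloomTokenizerFast.from_pretrained(MODEL_NAME)
# Only the embeddings and LM head run in this process; the transformer
# blocks are served by remote peers in the swarm.
model = DistributedBloomForCausalLM.from_pretrained(
    MODEL_NAME, initial_peers=INITIAL_PEERS
).to("cpu")

input_ids = tokenizer(["A cat sat on a mat and"], return_tensors="pt").input_ids
output = model.generate(input_ids, max_new_tokens=20)
print(tokenizer.batch_decode(output)[0])

The Space UI itself can be tried locally with streamlit run app.py once the submodule is in place.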
petals ADDED
@@ -0,0 +1 @@
+Subproject commit 57e8d2e721371ff9f5200cd228dfdea5bf9ed68d
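The petals entry is a gitlink: it pins the submodule to the exact Petals revision 57e8d2e7, so the Space builds against a fixed snapshot of the library rather than whatever is currently on its default branch. Bumping Petals later means checking out a newer commit inside petals/ and committing the updated pointer.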