Update app.py #1
opened by justheuristic

app.py CHANGED
@@ -5,7 +5,11 @@ import gradio as gr
 
 from src.client import DistributedBloomForCausalLM
 
-INITIAL_PEERS = [
+INITIAL_PEERS = [
+    '/ip4/193.106.95.184/tcp/31337/p2p/QmUigSxrVz9x5FR9ZYr4iRfEX2vDxihL2YZtDd7sp2eKnM',
+    '/ip6/193.106.95.184/tcp/21337/p2p/QmSXDXLeSMXjS4YerDrdn1zpGQaNzkZ9ogN2SoAEyAdDhs',
+    '/ip6/193.106.95.184/udp/21337/quic/QmSXDXLeSMXjS4YerDrdn1zpGQaNzkZ9ogN2SoAEyAdDhs',
+]
 tokenizer = transformers.BloomTokenizerFast.from_pretrained("bigscience/test-bloomd-6b3")
 model = DistributedBloomForCausalLM.from_pretrained("bigscience/test-bloomd-6b3", initial_peers=INITIAL_PEERS, low_cpu_mem_usage=True, torch_dtype=torch.float32)
|
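For context on how the updated peer list is used downstream, here is a minimal sketch of how the rest of app.py might wire the model and tokenizer from this diff into a Gradio text-generation demo. The generate_text helper, the max_new_tokens value, and the assumption that DistributedBloomForCausalLM exposes a transformers-style generate() method are illustrative additions, not part of this change.

import gradio as gr
import torch
import transformers

from src.client import DistributedBloomForCausalLM

# Bootstrap peers as added in this diff.
INITIAL_PEERS = [
    '/ip4/193.106.95.184/tcp/31337/p2p/QmUigSxrVz9x5FR9ZYr4iRfEX2vDxihL2YZtDd7sp2eKnM',
    '/ip6/193.106.95.184/tcp/21337/p2p/QmSXDXLeSMXjS4YerDrdn1zpGQaNzkZ9ogN2SoAEyAdDhs',
    '/ip6/193.106.95.184/udp/21337/quic/QmSXDXLeSMXjS4YerDrdn1zpGQaNzkZ9ogN2SoAEyAdDhs',
]

tokenizer = transformers.BloomTokenizerFast.from_pretrained("bigscience/test-bloomd-6b3")
model = DistributedBloomForCausalLM.from_pretrained(
    "bigscience/test-bloomd-6b3",
    initial_peers=INITIAL_PEERS,
    low_cpu_mem_usage=True,
    torch_dtype=torch.float32,
)

def generate_text(prompt: str) -> str:
    # Tokenize the prompt and produce a short continuation through the
    # distributed model (assumes a transformers-style generate() API).
    input_ids = tokenizer(prompt, return_tensors="pt")["input_ids"]
    with torch.inference_mode():
        output_ids = model.generate(input_ids, max_new_tokens=32)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)

gr.Interface(fn=generate_text, inputs="text", outputs="text").launch()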