reduce batch size
generate.py +1 -1
generate.py CHANGED
@@ -30,7 +30,7 @@ if torch.backends.mps.is_available():
 else:
     device = "cuda"
 model_id = "google/gemma-2b-it"
-batch_size =
+batch_size = 10
 
 model = models.transformers(model_id, device=device)
 
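For context, generate.py loads gemma-2b-it through the outlines transformers integration, and this commit lowers batch_size to 10, presumably so generation fits in the Space's GPU memory. Below is a minimal sketch of how a batch_size cap like this is typically applied; the device fallback to "mps", the prompt-chunking helper, and the generate.text call are assumptions for illustration and not the Space's actual code. Only models.transformers(model_id, device=device) and the batch_size value come from the diff itself.

# Minimal sketch, assuming the Space uses the outlines 0.x API visible in the diff.
# The chunking helper and its parameters are hypothetical.
import torch
from outlines import models, generate

device = "mps" if torch.backends.mps.is_available() else "cuda"
model_id = "google/gemma-2b-it"
batch_size = 10  # reduced in this commit to keep peak GPU memory bounded

model = models.transformers(model_id, device=device)
generator = generate.text(model)

def run_in_batches(prompts, size=batch_size):
    # Feed at most `size` prompts to the model per call so memory use stays capped.
    outputs = []
    for start in range(0, len(prompts), size):
        chunk = prompts[start:start + size]
        outputs.extend(generator(chunk, max_tokens=128))
    return outputs

Under this sketch, a list of 25 prompts would be processed in three calls of 10, 10, and 5 prompts, which is the practical effect of reducing batch_size.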