Spaces: Running on Zero
PommesPeter committed · Commit 6beac6b
1 Parent(s): 51c2831

Update app.py
app.py CHANGED
@@ -10,7 +10,7 @@ subprocess.run(
 os.makedirs("/home/user/app/checkpoints", exist_ok=True)
 from huggingface_hub import snapshot_download
 snapshot_download(
-    repo_id="Alpha-VLLM/Lumina-Next-
+    repo_id="Alpha-VLLM/Lumina-Next-2B-HQ-SFT", local_dir="/home/user/app/checkpoints"
 )

 hf_token = os.environ["HF_TOKEN"]
@@ -104,7 +104,7 @@ def load_models(args, master_port, rank):
 ).eval()
 cap_feat_dim = text_encoder.config.hidden_size

-    tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b", token=hf_token)
+    tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b", token=hf_token, add_bos_token=True, add_eos_token=True)
 tokenizer.padding_side = "right"

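Read together, the two hunks leave app.py with the setup below. This is a minimal sketch assembled only from the changed lines and their visible context (the checkpoint path, HF_TOKEN, and the Gemma tokenizer), not the complete file:

import os

from huggingface_hub import snapshot_download
from transformers import AutoTokenizer

# Checkpoint directory used by the Space.
os.makedirs("/home/user/app/checkpoints", exist_ok=True)

# Pull the Lumina-Next-2B-HQ-SFT weights into that directory
# (the new repo_id and local_dir from this commit).
snapshot_download(
    repo_id="Alpha-VLLM/Lumina-Next-2B-HQ-SFT",
    local_dir="/home/user/app/checkpoints",
)

hf_token = os.environ["HF_TOKEN"]

# Gemma tokenizer for caption features; the commit adds explicit BOS/EOS insertion.
tokenizer = AutoTokenizer.from_pretrained(
    "google/gemma-2b",
    token=hf_token,
    add_bos_token=True,
    add_eos_token=True,
)
tokenizer.padding_side = "right"

Presumably the add_bos_token/add_eos_token flags are there so every caption handed to the Gemma-2B text encoder is wrapped in BOS/EOS tokens, while local_dir pins the downloaded checkpoint to the path the rest of the app expects.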