Spaces: Running on Zero
omni-research committed · Commit 0d593aa · 1 Parent(s): 1edde68

set HF_TOKEN
Files changed:
- app.py +1 -1
- dataset/processor.py +3 -0
- tools/utils.py +6 -1
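In short, the commit reads a Hugging Face access token from the HF_TOKEN environment variable (a Space secret) and forwards it to each hub download below. A minimal sketch of the shared pattern, assuming a transformers version whose from_pretrained accepts the token keyword (the successor to the deprecated use_auth_token); the model id is the default that app.py sets in this commit:

import os
from transformers import AutoConfig  # any from_pretrained-style loader follows the same pattern

HF_TOKEN = os.environ.get('HF_TOKEN', '')  # empty string when the secret is unset

# token= authenticates the Hub download, which gated or private repos require.
config = AutoConfig.from_pretrained(
    "omni-research/Tarsier2-7b",
    trust_remote_code=True,
    token=HF_TOKEN,
)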
app.py CHANGED
@@ -24,7 +24,7 @@ import os
 import torch
 
 device = 'cuda'
-model_path = os.getenv("MODEL_PATH", "omni-research/
+model_path = os.getenv("MODEL_PATH", "omni-research/Tarsier2-7b")
 max_n_frames = int(os.getenv("MAX_N_FRAMES", 8))
 debug = False
 
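Both settings follow the same environment-override pattern; a small sketch of how the defaults resolve (values taken from the diff, behavior is standard os.getenv):

import os

# os.getenv returns the second argument when the variable is unset, so the
# Space runs with these defaults unless MODEL_PATH / MAX_N_FRAMES are
# defined in its settings.
model_path = os.getenv("MODEL_PATH", "omni-research/Tarsier2-7b")
max_n_frames = int(os.getenv("MAX_N_FRAMES", 8))  # int() accepts the int default or a str override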
dataset/processor.py CHANGED
@@ -20,6 +20,8 @@ import re
 
 from .utils import sample_image, sample_video, sample_gif, get_visual_type
 
+HF_TOKEN = os.environ.get('HF_TOKEN', '')
+
 ext2sampler = {
     'image': sample_image,
     'gif': sample_gif,
@@ -83,6 +85,7 @@ class Processor(object):
             model_name_or_path,
             padding_side='left',
             trust_remote_code=True,
+            token=HF_TOKEN,
         )
         self.processor = CustomImageProcessor(sub_processor)
         self.tokenizer = sub_processor.tokenizer
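For context, a hedged sketch of what the patched loader amounts to, assuming the sub-processor is an AutoProcessor-style class (the surrounding code shows it exposing a .tokenizer); the model id is illustrative, since the real call receives model_name_or_path:

import os
from transformers import AutoProcessor

HF_TOKEN = os.environ.get('HF_TOKEN', '')

sub_processor = AutoProcessor.from_pretrained(
    "omni-research/Tarsier2-7b",  # illustrative stand-in for model_name_or_path
    padding_side='left',          # forwarded to the underlying tokenizer
    trust_remote_code=True,
    token=HF_TOKEN,               # the line this hunk adds
)
tokenizer = sub_processor.tokenizer  # as in Processor.__init__ above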
tools/utils.py CHANGED
@@ -15,6 +15,9 @@ from models.modeling_tarsier import TarsierForConditionalGeneration, LlavaConfig
 from dataset.processor import Processor
 import torch
 import base64
+import os
+
+HF_TOKEN = os.environ.get('HF_TOKEN', '')
 
 class Color:
 
@@ -52,13 +55,15 @@ def load_model_and_processor(model_name_or_path, max_n_frames=8):
     model_config = LlavaConfig.from_pretrained(
         model_name_or_path,
         trust_remote_code=True,
+        token=HF_TOKEN,
     )
     model = TarsierForConditionalGeneration.from_pretrained(
         model_name_or_path,
         config=model_config,
         device_map='auto',
         torch_dtype=torch.float16,
-        trust_remote_code=True
+        trust_remote_code=True,
+        token=HF_TOKEN,
     )
     model.eval()
     return model, processor
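Since a missing or invalid secret otherwise only surfaces as a 401 partway through the model download, a fail-fast check at startup can help; whoami is a standard huggingface_hub helper, and this guard is an illustrative addition, not part of the commit:

import os
from huggingface_hub import whoami

HF_TOKEN = os.environ.get('HF_TOKEN', '')

# Fail with a readable message before any from_pretrained call runs.
if not HF_TOKEN:
    raise RuntimeError("HF_TOKEN is unset; add it under the Space's 'Variables and secrets'.")
print("Authenticated as:", whoami(token=HF_TOKEN)["name"])  # raises if the token is invalid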