Spaces:
Running
on
Zero
Running
on
Zero
VictorSanh
committed on
Commit
•
db79ccc
1
Parent(s):
bf8b9f8
update with the chatty
Browse files
- app_dialogue.py +2 -4
- requirements.txt +1 -1
app_dialogue.py
CHANGED
@@ -3,8 +3,6 @@ import subprocess
|
|
3 |
|
4 |
# Install flash attention
|
5 |
subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
|
6 |
-
# Install private transformers fork which is the only place where idefics2 has been integrated at the time being
|
7 |
-
subprocess.run(f"pip install git+https://github.com/huggingface/transformers.git@16c8317a5cc9297488a08ead83ea5a752f0912b6", shell=True)
|
8 |
|
9 |
|
10 |
import copy
|
@@ -31,8 +29,8 @@ MODELS = {
|
|
31 |
trust_remote_code=True,
|
32 |
token=os.environ["HF_AUTH_TOKEN"],
|
33 |
).to(DEVICE),
|
34 |
-
"idefics2-8b (chat)": Idefics2ForConditionalGeneration.from_pretrained(
|
35 |
-
"HuggingFaceM4/idefics2-
|
36 |
torch_dtype=torch.bfloat16,
|
37 |
_attn_implementation="flash_attention_2",
|
38 |
trust_remote_code=True,
|
|
|
3 |
|
4 |
# Install flash attention
|
5 |
subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
|
|
|
|
|
6 |
|
7 |
|
8 |
import copy
|
|
|
29 |
trust_remote_code=True,
|
30 |
token=os.environ["HF_AUTH_TOKEN"],
|
31 |
).to(DEVICE),
|
32 |
+
"idefics2-8b-chatty (chat)": Idefics2ForConditionalGeneration.from_pretrained(
|
33 |
+
"HuggingFaceM4/idefics2-8b-chatty",
|
34 |
torch_dtype=torch.bfloat16,
|
35 |
_attn_implementation="flash_attention_2",
|
36 |
trust_remote_code=True,
|
requirements.txt
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
transformers
|
2 |
datasets
|
3 |
pillow
|
4 |
numpy
|
|
|
1 |
+
git+https://github.com/huggingface/transformers.git@6b78360e6d686b316360334f5109b46c39ff5ed8
|
2 |
datasets
|
3 |
pillow
|
4 |
numpy
|