Spaces: Running on Zero
Commit • 0e3b560
Parent(s): f16a265
Update app.py
app.py CHANGED
@@ -3,6 +3,7 @@ from PIL import Image
 import requests
 import subprocess
 from transformers import Blip2Processor, Blip2ForConditionalGeneration
+from huggingface_hub import snapshot_download
 import torch
 import uuid
 import os
@@ -20,6 +21,8 @@ subprocess.run(['wget', training_script_url])
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
+FACES_DATASET_PATH = snapshot_download(repo_id="multimodalart/faces-prior-preservation", repo_type="dataset")
+
 processor = Blip2Processor.from_pretrained("Salesforce/blip2-opt-2.7b")
 model = Blip2ForConditionalGeneration.from_pretrained(
     "Salesforce/blip2-opt-2.7b", device_map={"": 0}, torch_dtype=torch.float16
@@ -72,7 +75,7 @@ def make_options_visible(option):
 )
 def change_defaults(option, images):
     num_images = len(images)
-    max_train_steps = num_images*150
+    max_train_steps = num_images * 150
     max_train_steps = 500 if max_train_steps < 500 else max_train_steps
     random_files = []
     with_prior_preservation = False
@@ -91,7 +94,7 @@ def change_defaults(option, images):
         max_train_steps = num_images*100
         lr_scheduler = "constant"
         #Takes 150 random faces for the prior preservation loss
-        directory =
+        directory = FACES_DATASET_PATH
         file_count = 150
         files = [os.path.join(directory, file) for file in os.listdir(directory) if os.path.isfile(os.path.join(directory, file))]
         random_files = random.sample(files, min(len(files), file_count))