Fix indentation
app.py
CHANGED
```diff
@@ -18,9 +18,9 @@ from huggingface_hub import snapshot_download
 
 if torch.cuda.device_count() == 0:
     class Arguments:
-
-
-
+        def __init__(self, input_dir, output_dir):
+            self.input_dir = input_dir
+            self.output_dir = output_dir
 
     # Create the object
     args = Arguments("ckpts/llava-llama-3-8b-v1_1-transformers", "ckpts/text_encoder")
@@ -31,9 +31,9 @@ if torch.cuda.device_count() > 0:
     snapshot_download(repo_id="xtuner/llava-llama-3-8b-v1_1-transformers", repo_type="model", local_dir="ckpts/llava-llama-3-8b-v1_1-transformers", force_download=True)
 
     class Args:
-
-
-
+        def __init__(self, input_dir, output_dir):
+            self.input_dir = input_dir
+            self.output_dir = output_dir
 
     # Create the object
     args = Args("ckpts/llava-llama-3-8b-v1_1-transformers", "ckpts/text_encoder")
```
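In both branches the change gives the parameter-holder class a properly indented `__init__`, so the block no longer raises an `IndentationError` when app.py is imported. A minimal sketch of the resulting pattern is shown below; the `dataclass` variant is an illustrative alternative only, not part of this commit, and the class name `ArgsAsDataclass` is made up for the example.

```python
from dataclasses import dataclass


class Args:
    """Plain holder for the two checkpoint directories, as in app.py after the fix."""

    def __init__(self, input_dir, output_dir):
        self.input_dir = input_dir
        self.output_dir = output_dir


@dataclass
class ArgsAsDataclass:
    """Equivalent holder written as a dataclass (hypothetical alternative, not in the commit)."""

    input_dir: str
    output_dir: str


# Same call as in the diff: positional paths for the LLaVA checkpoint and the text encoder.
args = Args("ckpts/llava-llama-3-8b-v1_1-transformers", "ckpts/text_encoder")
print(args.input_dir, args.output_dir)
```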