wcy1122 committed
Commit 98b49dd · 1 Parent(s): eea7d48

update model loading

Files changed (1): app.py (+5 -12)
app.py CHANGED
@@ -14,27 +14,20 @@ from huggingface_hub import login, snapshot_download
 
 def _load_model_processor():
 
-    local_vlm_dir = snapshot_download(
+    local_dir = snapshot_download(
         repo_id="xiabs/DreamOmni2",
         revision="main",
-        allow_patterns=["vlm-model/**"],
-    )
-    local_lora_dir = snapshot_download(
-        repo_id="xiabs/DreamOmni2",
-        revision="main",
-        allow_patterns=["edit_lora/**"],
+        allow_patterns=["vlm-model/**", "edit_lora/**"],
     )
+    local_vlm_dir = os.path.join(local_dir, 'vlm-model')
+    local_lora_dir = os.path.join(local_dir, 'edit_lora')
 
     print(f"Loading models from vlm_path: {local_vlm_dir}, edit_lora_path: {local_lora_dir}")
     pipe = FluxKontextPipeline.from_pretrained(
         "black-forest-labs/FLUX.1-Kontext-dev",
         torch_dtype=torch.bfloat16
     )
-    pipe.load_lora_weights(
-        local_lora_dir,
-        adapter_name="edit",
-        weight_name="pytorch_lora_weights.safetensors"
-    )
+    pipe.load_lora_weights(local_lora_dir, adapter_name="edit")
     pipe.set_adapters(["edit"], adapter_weights=[1])
 
     vlm_model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
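For reference, a minimal standalone sketch of the consolidated download step this commit introduces; resolve_model_dirs and the __main__ check are illustrative additions, not part of app.py:

# Sketch of the single-download pattern from this commit (assumes huggingface_hub is installed).
import os

from huggingface_hub import snapshot_download


def resolve_model_dirs():
    # One snapshot_download call fetches both subfolders into the same local snapshot;
    # it returns the snapshot root, so the subfolder paths are joined afterwards.
    local_dir = snapshot_download(
        repo_id="xiabs/DreamOmni2",
        revision="main",
        allow_patterns=["vlm-model/**", "edit_lora/**"],
    )
    return os.path.join(local_dir, "vlm-model"), os.path.join(local_dir, "edit_lora")


if __name__ == "__main__":
    vlm_dir, lora_dir = resolve_model_dirs()
    print(f"vlm_path: {vlm_dir}, edit_lora_path: {lora_dir}")

Compared with the previous two snapshot_download calls, the single call avoids resolving the snapshot twice and makes the vlm-model and edit_lora paths explicit before they are handed to the FluxKontextPipeline and Qwen2_5_VLForConditionalGeneration loaders.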