TheLastBen committed on
Commit
827d95a
1 Parent(s): deb4d74

Upload sdxllorapps.py

Files changed (1)
  1. Scripts/sdxllorapps.py +37 -20
Scripts/sdxllorapps.py CHANGED
@@ -67,6 +67,7 @@ def Deps(force_reinstall):
     os.environ['TORCH_HOME'] = '/notebooks/cache/torch'
     os.environ['PYTHONWARNINGS'] = 'ignore'
     call("sed -i 's@text = _formatwarnmsg(msg)@text =\"\"@g' /usr/lib/python3.9/warnings.py", shell=True)
+    #call('pip install --root-user-action=ignore --disable-pip-version-check -qq gradio==3.39.0', shell=True, stdout=open('/dev/null', 'w'))
     if not os.path.exists('/notebooks/diffusers'):
         call('ln -s /diffusers /notebooks', shell=True)
     call("rm -r /deps", shell=True)
@@ -614,7 +615,8 @@ def caption(CAPTIONS_DIR, INSTANCE_DIR):



-def dbtrainxl(Resume_Training, UNet_Training_Epochs, UNet_Learning_Rate, dim, Offset_Noise, Resolution, MODEL_NAME, SESSION_DIR, INSTANCE_DIR, CAPTIONS_DIR, External_Captions, INSTANCE_NAME, Session_Name, OUTPUT_DIR, ofstnselvl, Save_VRAM):
+def dbtrainxl(Unet_Training_Epochs, Text_Encoder_Training_Epochs, Unet_Learning_Rate, Text_Encoder_Learning_Rate, dim, Offset_Noise, Resolution, MODEL_NAME, SESSION_DIR, INSTANCE_DIR, CAPTIONS_DIR, External_Captions, INSTANCE_NAME, Session_Name, OUTPUT_DIR, ofstnselvl, Save_VRAM):
+

     if os.path.exists(INSTANCE_DIR+"/.ipynb_checkpoints"):
         call('rm -r '+INSTANCE_DIR+'/.ipynb_checkpoints', shell=True)
@@ -622,10 +624,6 @@ def dbtrainxl(Resume_Training, UNet_Training_Epochs, UNet_Learning_Rate, dim, Of
         call('rm -r '+CAPTIONS_DIR+'/.ipynb_checkpoints', shell=True)


-    while not Resume_Training and not os.path.exists(MODEL_NAME+'/unet/diffusion_pytorch_model.safetensors'):
-        print('No model found, use the "Model Download" cell to download a model.')
-        time.sleep(5)
-
     Seed=random.randint(1, 999999)

     ofstnse=""
@@ -642,22 +640,37 @@ def dbtrainxl(Resume_Training, UNet_Training_Epochs, UNet_Learning_Rate, dim, Of

     precision="fp16"

-    resume=""
-    if Resume_Training and os.path.exists(SESSION_DIR+'/'+Session_Name+'.safetensors'):
-        resume="--resume"
-
-        print('Resuming Training...')
-    elif Resume_Training and not os.path.exists(SESSION_DIR+'/'+Session_Name+'.safetensors'):
-        while MODEL_NAME=="":
-            print('No model found, use the "Model Download" cell to download a model.')
-            time.sleep(5)
-        print('Previous model not found, training a new model...')
-
+
+
+    def train_only_text(SESSION_DIR, MODEL_NAME, INSTANCE_DIR, OUTPUT_DIR, Seed, Resolution, ofstnse, extrnlcptn, precision, Training_Epochs):
+        print('Training the Text Encoder...')
+        call('accelerate launch /notebooks/diffusers/examples/dreambooth/train_dreambooth_sdxl_TI.py \
+        '+ofstnse+' \
+        '+extrnlcptn+' \
+        --dim='+str(dim)+' \
+        --ofstnselvl='+str(ofstnselvl)+' \
+        --image_captions_filename \
+        --Session_dir='+SESSION_DIR+' \
+        --pretrained_model_name_or_path='+MODEL_NAME+' \
+        --instance_data_dir='+INSTANCE_DIR+' \
+        --output_dir='+OUTPUT_DIR+' \
+        --captions_dir='+CAPTIONS_DIR+' \
+        --seed='+str(Seed)+' \
+        --resolution='+str(Resolution)+' \
+        --mixed_precision='+str(precision)+' \
+        --train_batch_size=1 \
+        --gradient_accumulation_steps=1 '+GC+ ' \
+        --use_8bit_adam \
+        --learning_rate='+str(Text_Encoder_Learning_Rate)+' \
+        --lr_scheduler="cosine" \
+        --lr_warmup_steps=0 \
+        --num_train_epochs='+str(Training_Epochs), shell=True)
+


     def train_only_unet(SESSION_DIR, MODEL_NAME, INSTANCE_DIR, OUTPUT_DIR, Seed, Resolution, ofstnse, extrnlcptn, precision, Training_Epochs):
+        print('Training the UNet...')
         call('accelerate launch /notebooks/diffusers/examples/dreambooth/train_dreambooth_sdxl_lora.py \
-        '+resume+' \
         '+ofstnse+' \
         '+extrnlcptn+' \
         --dim='+str(dim)+' \
@@ -674,18 +687,22 @@ def dbtrainxl(Resume_Training, UNet_Training_Epochs, UNet_Learning_Rate, dim, Of
         --train_batch_size=1 \
         --gradient_accumulation_steps=1 '+GC+ ' \
         --use_8bit_adam \
-        --learning_rate='+str(UNet_Learning_Rate)+' \
+        --learning_rate='+str(Unet_Learning_Rate)+' \
         --lr_scheduler="cosine" \
         --lr_warmup_steps=0 \
         --num_train_epochs='+str(Training_Epochs), shell=True)



-    if UNet_Training_Epochs!=0:
-        train_only_unet(SESSION_DIR, MODEL_NAME, INSTANCE_DIR, OUTPUT_DIR, Seed, Resolution, ofstnse, extrnlcptn, precision, Training_Epochs=UNet_Training_Epochs)
+    if Unet_Training_Epochs!=0:
+        if Text_Encoder_Training_Epochs!=0:
+            train_only_text(SESSION_DIR, MODEL_NAME, INSTANCE_DIR, OUTPUT_DIR, Seed, Resolution, ofstnse, extrnlcptn, precision, Training_Epochs=Text_Encoder_Training_Epochs)
+            clear_output()
+        train_only_unet(SESSION_DIR, MODEL_NAME, INSTANCE_DIR, OUTPUT_DIR, Seed, Resolution, ofstnse, extrnlcptn, precision, Training_Epochs=Unet_Training_Epochs)
     else :
         print('Nothing to do')

+
     if os.path.exists(SESSION_DIR+'/'+Session_Name+'.safetensors'):
         clear_output()
         print("DONE, the LoRa model is in the session's folder")