frank-chieng committed on
Commit
b0964fb
1 Parent(s): 2fa620c

feat: upload sdxl_lora_architecture_siheyuan LoRA model

Browse files
sdxl_lora_architecture_siheyuan_config/config_file.toml ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [sdxl_arguments]
2
+ cache_text_encoder_outputs = true
3
+ no_half_vae = true
4
+ min_timestep = 0
5
+ max_timestep = 1000
6
+ shuffle_caption = false
7
+
8
+ [model_arguments]
9
+ pretrained_model_name_or_path = "/content/pretrained_model/sd_xl_base_1.0.safetensors"
10
+ vae = "/content/vae/sdxl_vae.safetensors"
11
+
12
+ [dataset_arguments]
13
+ debug_dataset = false
14
+ in_json = "/content/LoRA/meta_lat.json"
15
+ train_data_dir = "/content/LoRA/train_data"
16
+ dataset_repeats = 20
17
+ keep_tokens = 0
18
+ resolution = "1024,1024"
19
+ color_aug = false
20
+ token_warmup_min = 1
21
+ token_warmup_step = 0
22
+
23
+ [training_arguments]
24
+ output_dir = "/content/drive/MyDrive/kohya-trainer/output"
25
+ output_name = "sdxl_lora_architecture_siheyuan"
26
+ save_precision = "fp16"
27
+ save_every_n_epochs = 1
28
+ train_batch_size = 4
29
+ max_token_length = 225
30
+ mem_eff_attn = false
31
+ sdpa = true
32
+ xformers = false
33
+ max_train_epochs = 10
34
+ max_data_loader_n_workers = 8
35
+ persistent_data_loader_workers = true
36
+ gradient_checkpointing = true
37
+ gradient_accumulation_steps = 1
38
+ mixed_precision = "fp16"
39
+
40
+ [logging_arguments]
41
+ log_with = "wandb"
42
+ log_tracker_name = "sdxl_lora_architecture_siheyuan"
43
+ logging_dir = "/content/LoRA/logs"
44
+
45
+ [sample_prompt_arguments]
46
+ sample_every_n_epochs = 1
47
+ sample_sampler = "euler_a"
48
+
49
+ [saving_arguments]
50
+ save_model_as = "safetensors"
51
+
52
+ [optimizer_arguments]
53
+ optimizer_type = "AdaFactor"
54
+ learning_rate = 1e-5
55
+ max_grad_norm = 0
56
+ optimizer_args = [ "scale_parameter=False", "relative_step=False", "warmup_init=False",]
57
+ lr_scheduler = "constant_with_warmup"
58
+ lr_warmup_steps = 100
59
+
60
+ [additional_network_arguments]
61
+ no_metadata = false
62
+ network_module = "networks.lora"
63
+ network_dim = 32
64
+ network_alpha = 16
65
+ network_args = [ "conv_dim=32", "conv_alpha=16",]
66
+ network_train_unet_only = true
67
+
68
+ [advanced_training_config]
69
+ noise_offset = 0.1
70
+ adaptive_noise_scale = 0.01
71
+ min_snr_gamma = 5
sdxl_lora_architecture_siheyuan_config/sample_prompt.toml ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ [prompt]
2
+ width = 1024
3
+ height = 1024
4
+ scale = 7
5
+ sample_steps = 28
6
+ [[prompt.subset]]
7
+ prompt = "siheyuan, chinese architecture"
8
+