{
"LoRA_type": "LyCORIS/LoCon",
"additional_parameters": "",
"block_alphas": "",
"block_dims": "",
"block_lr_zero_threshold": "",
"bucket_no_upscale": true,
"bucket_reso_steps": 64.0,
"cache_latents": true,
"caption_dropout_every_n_epochs": 0.0,
"caption_dropout_rate": 0,
"caption_extension": ".txt",
"clip_skip": 2,
"color_aug": false,
"conv_alpha": 1,
"conv_alphas": "",
"conv_dim": 32,
"conv_dims": "",
"down_lr_weight": "",
"enable_bucket": false,
"epoch": 1,
"flip_aug": false,
"full_fp16": false,
"gradient_accumulation_steps": 1.0,
"gradient_checkpointing": false,
"keep_tokens": "0",
"learning_rate": "0.0001",
"lora_network_weights": "",
"lr_scheduler": "constant",
"lr_scheduler_num_cycles": "",
"lr_scheduler_power": "",
"lr_warmup": "0",
"max_data_loader_n_workers": "1",
"max_resolution": "512,650",
"max_token_length": "75",
"max_train_epochs": "",
"mem_eff_attn": true,
"mid_lr_weight": "",
"min_snr_gamma": 0,
"mixed_precision": "bf16",
"network_alpha": 64,
"network_dim": 64,
"no_token_padding": false,
"noise_offset": "0.05",
"num_cpu_threads_per_process": 2,
"optimizer": "AdamW8bit",
"optimizer_args": "",
"persistent_data_loader_workers": false,
"prior_loss_weight": 1.0,
"random_crop": false,
"save_every_n_epochs": 1,
"save_precision": "bf16",
"seed": "1234",
"shuffle_caption": false,
"stop_text_encoder_training": 0,
"text_encoder_lr": "5e-05",
"train_batch_size": 3,
"training_comment": "",
"unet_lr": "0.0001",
"up_lr_weight": "",
"v2": false,
"v_parameterization": false,
"vae_batch_size": 0,
"xformers": true
}