|
{
  "LoRA_type": "LyCORIS/LoCon",
  "LyCORIS_preset": "full",
  "adaptive_noise_scale": 0,
  "additional_parameters": "--max_grad_norm=0",
  "block_alphas": "",
  "block_dims": "",
  "block_lr_zero_threshold": "",
  "bucket_no_upscale": true,
  "bucket_reso_steps": 64,
  "bypass_mode": false,
  "cache_latents": true,
  "cache_latents_to_disk": true,
  "caption_dropout_every_n_epochs": 0.0,
  "caption_dropout_rate": 0,
  "caption_extension": ".txt",
  "clip_skip": "1",
  "color_aug": false,
  "constrain": 0.0,
  "conv_alpha": 4,
  "conv_block_alphas": "",
  "conv_block_dims": "",
  "conv_dim": 8,
  "dataset_config": "",
  "debiased_estimation_loss": false,
  "decompose_both": false,
  "dim_from_weights": false,
  "dora_wd": true,
  "down_lr_weight": "",
  "enable_bucket": true,
  "epoch": 8,
  "factor": -1,
  "flip_aug": false,
  "fp8_base": false,
  "full_bf16": false,
  "full_fp16": false,
  "gpu_ids": "",
  "gradient_accumulation_steps": 1,
  "gradient_checkpointing": true,
  "keep_tokens": "0",
  "learning_rate": 1.0,
  "log_tracker_config": "",
  "log_tracker_name": "",
  "logging_dir": "",
  "lora_network_weights": "",
  "lr_scheduler": "cosine",
  "lr_scheduler_args": "",
  "lr_scheduler_num_cycles": "",
  "lr_scheduler_power": "",
  "lr_warmup": 0,
  "max_bucket_reso": 2048,
  "max_data_loader_n_workers": "0",
  "max_grad_norm": 1,
  "max_resolution": "1024,1024",
  "max_timestep": 1000,
  "max_token_length": "225",
  "max_train_epochs": "",
  "max_train_steps": "",
  "mem_eff_attn": false,
  "mid_lr_weight": "",
  "min_bucket_reso": 256,
  "min_snr_gamma": 0,
  "min_timestep": 0,
  "mixed_precision": "fp16",
  "model_list": "custom",
  "module_dropout": 0,
  "multi_gpu": false,
  "multires_noise_discount": 0.3,
  "multires_noise_iterations": 6,
  "network_alpha": 4,
  "network_dim": 8,
  "network_dropout": 0,
  "noise_offset": 0.0357,
  "noise_offset_type": "Multires",
  "num_cpu_threads_per_process": 2,
  "num_machines": 1,
  "num_processes": 1,
  "optimizer": "Prodigy",
  "optimizer_args": "decouple=True weight_decay=0.5 betas=0.9,0.99 use_bias_correction=False",
  "output_dir": "/workspace/output",
  "output_name": "dora_thumbnail_RaemuMix_civit_10_repeats",
  "persistent_data_loader_workers": false,
  "pretrained_model_name_or_path": "/workspace/RaemuMix.safetensors",
  "prior_loss_weight": 1.0,
  "random_crop": false,
  "rank_dropout": 0,
  "rank_dropout_scale": false,
  "reg_data_dir": "",
  "rescaled": false,
  "resume": "",
  "sample_every_n_epochs": 0,
  "sample_every_n_steps": 100,
  "sample_prompts": " A man in a dark cave holds a flaming torch in one hand and a staff with a white bird on it in the other. He is dressed in a dark armor and red hat.",
  "sample_sampler": "euler_a",
  "save_every_n_epochs": 1,
  "save_every_n_steps": 0,
  "save_last_n_steps": 0,
  "save_last_n_steps_state": 0,
  "save_model_as": "safetensors",
  "save_precision": "fp16",
  "save_state": false,
  "scale_v_pred_loss_like_noise_pred": false,
  "scale_weight_norms": 1,
  "sdxl": false,
  "sdxl_cache_text_encoder_outputs": false,
  "sdxl_no_half_vae": true,
  "seed": "12345",
  "shuffle_caption": true,
  "stop_text_encoder_training_pct": 0,
  "text_encoder_lr": 1.0,
  "train_batch_size": 2,
  "train_data_dir": "/workspace/thumbnail",
  "train_norm": false,
  "train_on_input": false,
  "training_comment": "",
  "unet_lr": 1.0,
  "unit": 1,
  "up_lr_weight": "",
  "use_cp": true,
  "use_scalar": false,
  "use_tucker": false,
  "use_wandb": true,
  "v2": false,
  "v_parameterization": false,
  "v_pred_like_loss": 0,
  "vae": "",
  "vae_batch_size": 0,
"wandb_api_key": "64f5d133411f28d5430199c5765ed23361535bdf", |
|
"wandb_run_name": "", |
|
"weighted_captions": false, |
|
"xformers": "xformers" |
|
} |
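
For reference, here is a minimal sketch (Python standard library only) of how one might sanity-check the interacting fields in this export before loading it into the kohya_ss GUI. It assumes the JSON above is saved as dora_thumbnail_config.json, a hypothetical filename; adjust the path to wherever you keep the file. The checks only restate what the config itself encodes: Prodigy is an adaptive optimizer run with unit learning rates, dora_wd is a LyCORIS option, and the linear/conv dim-alpha pairs are 8/4.

# Minimal sketch: inspect a few interacting fields in the exported config above.
# Assumes the JSON was saved as "dora_thumbnail_config.json" (hypothetical filename).
import json
from pathlib import Path

cfg = json.loads(Path("dora_thumbnail_config.json").read_text())

# Prodigy adapts its own step size, so the unit learning rates (1.0) are expected.
if cfg["optimizer"].lower() == "prodigy":
    for key in ("learning_rate", "unet_lr", "text_encoder_lr"):
        if float(cfg[key]) != 1.0:
            print(f"warning: {key}={cfg[key]} (Prodigy is usually run with 1.0)")

# DoRA weight decomposition is a LyCORIS option, so it pairs with a LyCORIS LoRA_type.
if cfg.get("dora_wd") and not cfg["LoRA_type"].startswith("LyCORIS"):
    print("warning: dora_wd is true but LoRA_type is not a LyCORIS variant")

# Report the rank/alpha pairs for the linear and convolutional layers.
print(f'linear: dim={cfg["network_dim"]}, alpha={cfg["network_alpha"]}')
print(f'conv:   dim={cfg["conv_dim"]}, alpha={cfg["conv_alpha"]}')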