{
"pretrained_model_name_or_path": "runwayml/stable-diffusion-v1-5",
"v2": false,
"v_parameterization": false,
"train_dir": "D:/dataset/paige_spiranac/ft",
"image_folder": "D:\\dataset\\paige_spiranac\\lora\\img4_g8\\16_paige_spiranac",
"output_dir": "D:/models/test",
"logging_dir": "D:/dataset/paige_spiranac/ft/logs",
"max_resolution": "512,512",
"min_bucket_reso": "256",
"max_bucket_reso": "1024",
"batch_size": "1",
"flip_aug": false,
"caption_metadata_filename": "meta_cap.json",
"latent_metadata_filename": "meta_lat.json",
"full_path": true,
"learning_rate": "0.0000166666666",
"lr_scheduler": "cosine",
"lr_warmup": "10",
"dataset_repeats": "10",
"train_batch_size": 4,
"epoch": "2",
"save_every_n_epochs": "1",
"mixed_precision": "bf16",
"save_precision": "fp16",
"seed": "1234",
"num_cpu_threads_per_process": 2,
"train_text_encoder": true,
"create_caption": true,
"create_buckets": false,
"save_model_as": "safetensors",
"caption_extension": ".txt",
"use_8bit_adam": false,
"xformers": true,
"clip_skip": 1,
"save_state": false,
"resume": "",
"gradient_checkpointing": false,
"gradient_accumulation_steps": 1.0,
"mem_eff_attn": false,
"shuffle_caption": true,
"output_name": "paige_spiranac_v1.5e",
"max_token_length": "150",
"max_train_epochs": "",
"max_data_loader_n_workers": "0",
"full_fp16": false,
"color_aug": false,
"model_list": "runwayml/stable-diffusion-v1-5",
"cache_latents": true,
"use_latent_files": "No",
"keep_tokens": 1,
"persistent_data_loader_workers": false,
"bucket_no_upscale": true,
"random_crop": false,
"bucket_reso_steps": 1.0,
"caption_dropout_every_n_epochs": 0.0,
"caption_dropout_rate": 0.1,
"optimizer": "Lion",
"optimizer_args": "",
"noise_offset": ""
}