lora-training/haruka/lora_chara_haruka_v1b_138i9r-5i4r.json
{
"pretrained_model_name_or_path": "G:/sd/repo/models/Stable-diffusion/nai-animefull-final-pruned.safetensors",
"v2": false,
"v_parameterization": false,
"logging_dir": "",
"train_data_dir": "G:/sd/training/datasets/haruka/dataset",
"reg_data_dir": "",
"output_dir": "G:/sd/lora/trained/chara/haruka",
"max_resolution": "832,832",
"learning_rate": "1e-5",
"lr_scheduler": "constant_with_warmup",
"lr_warmup": "5",
"train_batch_size": 3,
"epoch": "4",
"save_every_n_epochs": "",
"mixed_precision": "fp16",
"save_precision": "fp16",
"seed": "31337",
"num_cpu_threads_per_process": 32,
"cache_latents": true,
"caption_extension": ".txt",
"enable_bucket": true,
"gradient_checkpointing": false,
"full_fp16": false,
"no_token_padding": false,
"stop_text_encoder_training": 0,
"use_8bit_adam": true,
"xformers": true,
"save_model_as": "safetensors",
"shuffle_caption": true,
"save_state": false,
"resume": "",
"prior_loss_weight": 1.0,
"text_encoder_lr": "1.5e-5",
"unet_lr": "1.5e-4",
"network_dim": 128,
"lora_network_weights": "",
"color_aug": false,
"flip_aug": false,
"clip_skip": 2,
"gradient_accumulation_steps": 1.0,
"mem_eff_attn": false,
"output_name": "chara-haruka-v1b-128",
"model_list": "",
"max_token_length": "150",
"max_train_epochs": "",
"max_data_loader_n_workers": "",
"network_alpha": 128,
"training_comment": "Character: `haruka, 1girl, halo, short hair with long locks, purple eyes`\nStandard outfit: `hairclip, garrison cap, purple jacket, collared shirt, black skirt, miniskirt, black belt, boots`\nNew Year alt: `kimono skirt, purple kimono, floral print, purple hairband, hair flower, wide sleeves, obi, geta, tabi`\nNervous expression: `nervous, sweat, wavy mouth`\nVaguely yandere-like smile: `naughty face, nervous smile`\nFlustered: `sanpaku, wide-eyed, shaded face, flustered`\n\nNot all tags are necessary.\n\n(138 normal * 9 repeats + 5 multipleviews * 4 repeats) / 3 batch size * 4 epochs = 1683 steps",
"keep_tokens": 2,
"lr_scheduler_num_cycles": "",
"lr_scheduler_power": "",
"persistent_data_loader_workers": true,
"bucket_no_upscale": true,
"random_crop": false,
"bucket_reso_steps": 64.0,
"caption_dropout_every_n_epochs": 0.0,
"caption_dropout_rate": 0
}
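
For reference, the `138i9r-5i4r` suffix in the filename and the arithmetic at the end of `training_comment` follow the kohya-style per-folder repeat convention: 138 normal images at 9 repeats plus 5 multi-view images at 4 repeats, divided by the batch size of 3, times 4 epochs. Below is a minimal sketch of that step count in Python, assuming `<repeats>_<name>` dataset folder names; the folder names are illustrative assumptions, not taken from this repo.

import math

# Hypothetical dataset layout using kohya-style "<repeats>_<name>" folder names;
# the two folders below are assumptions inferred from the filename "138i9r-5i4r".
folders = {
    "9_haruka": 138,              # 138 normal images, 9 repeats each
    "4_haruka multipleviews": 5,  # 5 multi-view images, 4 repeats each
}

train_batch_size = 3  # matches "train_batch_size" above
epochs = 4            # matches "epoch" above

images_per_epoch = sum(
    int(name.split("_", 1)[0]) * count for name, count in folders.items()
)  # 138*9 + 5*4 = 1262

# Rounding up assumes the final partial batch still counts as a step.
total_steps = math.ceil(images_per_epoch * epochs / train_batch_size)

print(images_per_epoch, total_steps)  # 1262 1683, matching training_comment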