# Fine-tuning configuration — presumably consumed by kohya sd-scripts
# fine_tune.py (section names match its config_file schema); confirm against
# the training script before changing key names.
# NOTE(review): removed the trailing " | |" extraction artifacts that made
# every line invalid TOML.

[model_arguments]
# v2 + v_parameterization: base model is an SD 2.x v-prediction checkpoint.
v2 = true
v_parameterization = true
pretrained_model_name_or_path = "/content/pretrained_model/Replicant-V2.0_fp16.safetensors"

[optimizer_arguments]
min_snr_gamma = 5
optimizer_type = "AdamW8bit"
learning_rate = 2e-6
max_grad_norm = 1.0
train_text_encoder = false
# Constant LR with no warmup steps.
lr_scheduler = "constant"
lr_warmup_steps = 0

[dataset_arguments]
enable_bucket = true
debug_dataset = false
in_json = "/content/fine_tune/config/meta_lat.json"
train_data_dir = "/content/fine_tune/train_data"
dataset_repeats = 1
shuffle_caption = true
keep_tokens = 0
# Resolution is a "width,height" string, not an array — the consumer parses it.
resolution = "768,768"
caption_dropout_rate = 0
caption_tag_dropout_rate = 0
caption_dropout_every_n_epochs = 0
color_aug = false
token_warmup_min = 1
token_warmup_step = 0

[training_arguments]
output_dir = "/content/fine_tune/output"
output_name = "matsuriv2"
save_precision = "fp16"
save_n_epoch_ratio = 10
save_state = false
train_batch_size = 1
max_token_length = 225
# Memory/attention options: xformers enabled, memory-efficient attention and
# gradient checkpointing disabled.
mem_eff_attn = false
xformers = true
max_train_steps = 5000
max_data_loader_n_workers = 8
persistent_data_loader_workers = true
gradient_checkpointing = false
gradient_accumulation_steps = 1
mixed_precision = "fp16"
logging_dir = "/content/fine_tune/logs"
log_prefix = "matsuriv2"

[sample_prompt_arguments]
sample_every_n_steps = 250
sample_sampler = "ddim"

[saving_arguments]
save_model_as = "ckpt"