# stuff/Rosa2_config/config_file.toml
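
# LoRA training config in the TOML layout read by kohya-ss sd-scripts
# (typically passed to train_network.py via --config_file).

# [model_arguments]: base checkpoint and Stable Diffusion version flags.
# v2 and v_parameterization are false, so this targets an SD 1.x model with
# epsilon prediction; the /content path points to a Colab run using
# MeinaMix MeinaV9 as the base checkpoint.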
[model_arguments]
v2 = false
v_parameterization = false
pretrained_model_name_or_path = "/content/pretrained_model/meinamix_meinaV9.safetensors"
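
# [additional_network_arguments]: LoRA adapter settings for networks.lora.
# network_dim is the LoRA rank (32) and network_alpha (16) scales the update
# by alpha/dim = 0.5; both the UNet (lr 1e-4) and the text encoder (lr 5e-5)
# are trained.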
[additional_network_arguments]
no_metadata = false
unet_lr = 0.0001
text_encoder_lr = 5e-5
network_module = "networks.lora"
network_dim = 32
network_alpha = 16
network_train_unet_only = false
network_train_text_encoder_only = false
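
# [optimizer_arguments]: 8-bit AdamW (bitsandbytes) with Min-SNR loss
# weighting (gamma = 4) and a cosine-with-restarts schedule.
# lr_warmup_steps = 0.05 is presumably intended as a fraction (5%) of the
# total training steps rather than an absolute step count, and
# lr_scheduler_num_cycles = 0 presumably falls back to the scheduler default;
# how both are interpreted depends on the sd-scripts version in use.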
[optimizer_arguments]
min_snr_gamma = 4
optimizer_type = "AdamW8bit"
learning_rate = 0.0001
max_grad_norm = 1.0
lr_scheduler = "cosine_with_restarts"
lr_warmup_steps = 0.05
lr_scheduler_num_cycles = 0
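
# [dataset_arguments]: fine-tuning-style dataset driven by the meta_lat.json
# metadata (captions plus cached latents, presumably produced by the
# bucketing/latent-caching step) at 512x512, with each image repeated 10
# times per epoch. shuffle_caption with keep_tokens = 1 shuffles the caption
# tags but keeps the first token (typically the trigger word) in place;
# caption and tag dropout are disabled.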
[dataset_arguments]
debug_dataset = false
in_json = "/content/LoRA/meta_lat.json"
train_data_dir = "/content/LoRA/train_data/rosa"
dataset_repeats = 10
shuffle_caption = true
keep_tokens = 1
resolution = "512,512"
caption_dropout_rate = 0
caption_tag_dropout_rate = 0
caption_dropout_every_n_epochs = 0
color_aug = false
token_warmup_min = 1
token_warmup_step = 0
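
# [training_arguments]: fp16 mixed precision, batch size 2 with 2 gradient
# accumulation steps (effective batch size 4), xformers attention and
# gradient checkpointing to reduce VRAM use, 10 epochs, captions up to 225
# tokens, clip_skip = 2 (common for anime-style SD 1.x models), a small
# noise offset of 0.05, and lowram mode for the Colab environment.
# Checkpoints are written in fp16 to /content/LoRA/output as "Rosa2".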
[training_arguments]
output_dir = "/content/LoRA/output"
output_name = "Rosa2"
save_precision = "fp16"
save_n_epoch_ratio = 1
train_batch_size = 2
max_token_length = 225
mem_eff_attn = false
xformers = true
max_train_epochs = 10
max_data_loader_n_workers = 8
persistent_data_loader_workers = true
gradient_checkpointing = true
gradient_accumulation_steps = 2
mixed_precision = "fp16"
clip_skip = 2
logging_dir = "/content/LoRA/logs"
log_prefix = "Rosa2"
noise_offset = 0.05
lowram = true
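
# [sample_prompt_arguments]: generate sample images after every epoch with
# the Euler sampler; the prompts themselves are normally supplied separately
# (e.g. via a sample prompt file on the command line).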
[sample_prompt_arguments]
sample_every_n_epochs = 1
sample_sampler = "euler"
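
# [saving_arguments]: save the trained LoRA weights in .safetensors format.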
[saving_arguments]
save_model_as = "safetensors"