---
library_name: transformers
base_model: openchat/openchat-3.5-0106
datasets:
- berkeley-nest/Nectar
license: apache-2.0
---

Training hyperparameters:

```python
max_steps = 200
learning_rate = 1e-6
warmup_ratio = 0.1
dpo_beta = 0.4
use_rslora = True
use_loftq = False
lora_rank = 128
lora_alpha = 256
load_separate_reference_model = False
optim = "paged_lion_32bit"
```
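These values describe a DPO fine-tune of openchat/openchat-3.5-0106 on berkeley-nest/Nectar with a rank-stabilized LoRA adapter and a paged 32-bit Lion optimizer. Below is a minimal sketch of how they could be wired together with TRL's `DPOTrainer`, assuming a recent TRL/PEFT release; the `to_pairs` helper, the output directory, the `target_modules` choice, and the Nectar field names (`prompt`, `answers`, `rank`) are assumptions for illustration, not the exact script used to train this model.

```python
import torch
from datasets import load_dataset
from peft import LoraConfig
from transformers import AutoModelForCausalLM, AutoTokenizer
from trl import DPOConfig, DPOTrainer

model_name = "openchat/openchat-3.5-0106"
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Rank-stabilized LoRA (use_rslora=True); LoftQ init is left off (use_loftq=False).
# target_modules is an assumption; the card does not say which projections were adapted.
peft_config = LoraConfig(
    r=128,              # lora_rank
    lora_alpha=256,     # lora_alpha
    use_rslora=True,
    target_modules=["q_proj", "k_proj", "v_proj", "o_proj"],
    task_type="CAUSAL_LM",
)

training_args = DPOConfig(
    output_dir="openchat-3.5-0106-nectar-dpo",  # hypothetical path
    max_steps=200,
    learning_rate=1e-6,
    warmup_ratio=0.1,
    beta=0.4,                  # dpo_beta
    optim="paged_lion_32bit",  # paged 32-bit Lion from bitsandbytes
)

def to_pairs(row):
    # Hypothetical preprocessing: Nectar rows carry a ranked list of answers;
    # pair the best-ranked answer against the worst-ranked one.
    ranked = sorted(row["answers"], key=lambda a: a["rank"])
    return {
        "prompt": row["prompt"],
        "chosen": ranked[0]["answer"],
        "rejected": ranked[-1]["answer"],
    }

raw = load_dataset("berkeley-nest/Nectar", split="train")
dataset = raw.map(to_pairs, remove_columns=raw.column_names)

# ref_model=None: with a PEFT config the trainer reuses the base weights
# (adapters disabled) as the reference, matching load_separate_reference_model=False.
trainer = DPOTrainer(
    model=model,
    ref_model=None,
    args=training_args,
    train_dataset=dataset,
    processing_class=tokenizer,
    peft_config=peft_config,
)
trainer.train()
```

Leaving `ref_model=None` avoids holding a second full copy of the base model in memory, and the paged optimizer keeps Lion's states in pageable memory to soften GPU memory spikes; both choices are consistent with the settings above, though the original training setup may have differed.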