{"base_model": "meta-llama/Llama-2-7b-hf", "data_path": "yahma/alpaca-cleaned", "instruction_key": "instruction", "input_key": "input", "output_key": "output", "output_dir": "./output", "device_map": "auto", "batch_size": 16, "micro_batch_size": 1, "num_epochs": 1, "max_steps": 250, "eval_steps": 200, "save_steps": 50, "learning_rate": 0.0001, "cutoff_len": 512, "val_set_size": 0, "lora_r": 8, "lora_alpha": 16, "lora_dropout": 0.05, "weight_decay": 0.02, "warmup_ratio": 0.03, "lr_scheduler_type": "cosine", "lora_target_modules": ["q_proj", "k_proj", "v_proj"], "train_on_inputs": true, "add_eos_token": true, "group_by_length": true, "resume_from_checkpoint": null, "wandb_project": "unionai-llm-fine-tuning", "wandb_run_name": "", "wandb_watch": "", "wandb_log_model": "", "debug_mode": false, "debug_train_data_size": 1024}