adam_beta1: 0.9
adam_beta2: 0.999
assistant_tag: gpt
bf16: true
content_tag: value
cutoff_len: 2048
dataset: mlfoundations-dev/oh-dcft-v3-llama3.1-nemotron-70b_shareGPT_format
dataset_dir: ONLINE
ddp_timeout: 180000000
deepspeed: /opt/ml/code/zero3.json
do_train: true
enable_liger_kernel: false
eval_strategy: epoch
finetuning_type: full
formatting: sharegpt
global_batch_size: 2048
gradient_accumulation_steps: 8
gradient_checkpointing: true
hub_model_id: mlfoundations-dev/hp_ablations_grid_mistral_bsz2048_lr5e-6_scheduler-cosine-warmup0.15-minlr5e-7
learning_rate: 5e-06
logging_steps: 10
lr_scheduler_kwargs: min_lr=5e-07
lr_scheduler_type: cosine_with_min_lr
max_grad_norm: 1
messages: conversations
model_name_or_path: mistralai/Mistral-7B-v0.1
neat_packing: true
num_train_epochs: 3.0
output_dir: /opt/ml/model
overwrite_cache: true
overwrite_output_dir: true
packing: true
per_device_train_batch_size: 8
plot_loss: true
preprocessing_num_workers: 16
push_to_db: true
push_to_hub: true
report_to: wandb
role_tag: from
run_name: hp_ablations_grid_mistral_bsz2048_lr5e-6_scheduler-cosine-warmup0.15-minlr5e-7
save_strategy: epoch
stage: sft
template: mistral
user_tag: human
val_size: 0.05
warmup_ratio: 0.15
weight_decay: 0.1
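The ShareGPT tag mapping in the config (`messages: conversations`, `role_tag: from`, `content_tag: value`, `user_tag: human`, `assistant_tag: gpt`) implies dataset records shaped like the sketch below. This is an illustrative example of the format, not an actual row from the dataset:

```python
# A minimal ShareGPT-style record consistent with the tag mapping above.
# The conversation text here is made up for illustration.
example_record = {
    "conversations": [
        {"from": "human", "value": "What is the capital of France?"},
        {"from": "gpt", "value": "The capital of France is Paris."},
    ]
}
```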
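The batch-size fields are mutually consistent with a 32-device run, assuming the training harness enforces `global_batch_size` as `per_device_train_batch_size × gradient_accumulation_steps × world size` (an assumption about how these keys are interpreted, not something stated in the config):

```python
per_device_batch = 8    # per_device_train_batch_size
grad_accum_steps = 8    # gradient_accumulation_steps
global_batch = 2048     # global_batch_size

# World size implied by the three batch settings above.
num_devices = global_batch // (per_device_batch * grad_accum_steps)
assert num_devices * per_device_batch * grad_accum_steps == global_batch
print(num_devices)  # 32
```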
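The scheduler settings (`lr_scheduler_type: cosine_with_min_lr`, `warmup_ratio: 0.15`, `min_lr=5e-07`) describe a linear warmup to the peak rate over the first 15% of steps, followed by a cosine decay to the floor. The sketch below captures that shape; the exact formulation inside the training stack may differ slightly (e.g., HF's implementation parameterizes the floor via a rate rather than an absolute value):

```python
import math

def lr_at_step(step: int, total_steps: int,
               base_lr: float = 5e-6, min_lr: float = 5e-7,
               warmup_ratio: float = 0.15) -> float:
    """Linear warmup to base_lr, then cosine decay down to min_lr."""
    warmup_steps = max(1, int(total_steps * warmup_ratio))
    if step < warmup_steps:
        # Warmup phase: ramp linearly from 0 to the peak learning rate.
        return base_lr * step / warmup_steps
    # Decay phase: cosine from base_lr at the warmup boundary to min_lr at the end.
    progress = (step - warmup_steps) / max(1, total_steps - warmup_steps)
    return min_lr + (base_lr - min_lr) * 0.5 * (1.0 + math.cos(math.pi * progress))

# Peak of 5e-6 reached 15% of the way in; floor of 5e-7 at the final step.
print(lr_at_step(0, 1000), lr_at_step(150, 1000), lr_at_step(1000, 1000))
```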