{
"model": "abhinand/llama-2-13b-hf-bf16-sharded",
"project_name": "llama2-13b-1000Rows3Epochs",
"data_path": "Data/",
"train_split": "train",
"valid_split": null,
"add_eos_token": false,
"block_size": -1,
"model_max_length": 2048,
"padding": null,
"trainer": "sft",
"use_flash_attention_2": false,
"log": "none",
"disable_gradient_checkpointing": false,
"logging_steps": -1,
"evaluation_strategy": "epoch",
"save_total_limit": 1,
"save_strategy": "epoch",
"auto_find_batch_size": false,
"mixed_precision": null,
"lr": 0.0001,
"epochs": 8,
"batch_size": 3,
"warmup_ratio": 0.1,
"gradient_accumulation": 1,
"optimizer": "adamw_torch",
"scheduler": "linear",
"weight_decay": 0.0,
"max_grad_norm": 1.0,
"seed": 42,
"apply_chat_template": false,
"quantization": "int4",
"target_modules": null,
"merge_adapter": true,
"peft": true,
"lora_r": 16,
"lora_alpha": 32,
"lora_dropout": 0.05,
"model_ref": null,
"dpo_beta": 0.1,
"prompt_text_column": "prompt",
"text_column": "text",
"rejected_text_column": "rejected",
"push_to_hub": true,
"repo_id": "Jimmyhd/llama213b8Epochs1000Rows",
"username": null
}
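
For reference, here is a minimal sketch of how the model, quantization, and LoRA fields in this config might be consumed with the transformers and peft libraries. This is not AutoTrain's actual implementation, and the filename training_params.json is an assumption; it only illustrates what the fields map to.

import json
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig
from peft import LoraConfig, get_peft_model

# Load the parameter file shown above (filename is an assumption).
with open("training_params.json") as f:
    params = json.load(f)

# "quantization": "int4" suggests 4-bit loading via bitsandbytes.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.float16,
)

model = AutoModelForCausalLM.from_pretrained(
    params["model"],
    quantization_config=bnb_config,
    device_map="auto",
)

# "peft": true with lora_r / lora_alpha / lora_dropout maps naturally
# onto a LoraConfig. target_modules is null in the file, so peft falls
# back to its built-in defaults for the Llama architecture.
lora_config = LoraConfig(
    r=params["lora_r"],
    lora_alpha=params["lora_alpha"],
    lora_dropout=params["lora_dropout"],
    target_modules=params["target_modules"],
    task_type="CAUSAL_LM",
)
model = get_peft_model(model, lora_config)
model.print_trainable_parameters()

With "merge_adapter": true, the trained LoRA weights would be folded back into the base model after training (peft exposes this as merge_and_unload() on the adapted model), which is why the pushed repo can be used without peft at inference time.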