lunar-llm-mistral-7B-3epoch / training_params.json
{
"model": "mistralai/Mistral-7B-v0.1",
"project_name": "lunar-llm-mistral-7B",
"data_path": "MtG-json-to-ForgeScribe",
"train_split": "train",
"valid_split": null,
"add_eos_token": false,
"block_size": 1024,
"model_max_length": 1024,
"padding": null,
"trainer": "default",
"use_flash_attention_2": false,
"log": "none",
"disable_gradient_checkpointing": false,
"logging_steps": -1,
"evaluation_strategy": "epoch",
"save_total_limit": 1,
"save_strategy": "epoch",
"auto_find_batch_size": false,
"mixed_precision": "fp16",
"lr": 0.0001,
"epochs": 3,
"batch_size": 10,
"warmup_ratio": 0.1,
"gradient_accumulation": 4,
"optimizer": "adamw_torch",
"scheduler": "linear",
"weight_decay": 0.01,
"max_grad_norm": 1.0,
"seed": 42,
"apply_chat_template": false,
"quantization": "int4",
"target_modules": null,
"merge_adapter": true,
"peft": true,
"lora_r": 64,
"lora_alpha": 16,
"lora_dropout": 0.1,
"model_ref": null,
"dpo_beta": 0.1,
"prompt_text_column": "prompt",
"text_column": "text",
"rejected_text_column": "rejected",
"push_to_hub": true,
"repo_id": "404NotF0und/lunar-llm-mistral-7B",
"username": null
}
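
Below is a minimal sketch of how these parameters could map onto the Hugging Face peft and transformers APIs. This is an illustration under assumptions, not AutoTrain's actual internals: the LoraConfig/TrainingArguments mapping, the variable names, and the bitsandbytes handling of "quantization": "int4" are all inferred from the key names above.

    # Hypothetical mapping of training_params.json onto peft/transformers.
    # Assumption: this mirrors roughly what AutoTrain builds from the config;
    # it is not the tool's own code.
    import json

    from peft import LoraConfig
    from transformers import BitsAndBytesConfig, TrainingArguments

    with open("training_params.json") as f:
        params = json.load(f)

    # "quantization": "int4" -> 4-bit model loading via bitsandbytes (assumed).
    bnb_config = BitsAndBytesConfig(load_in_4bit=True)

    # "peft": true with lora_r / lora_alpha / lora_dropout -> a LoRA adapter.
    # "target_modules" is null in the JSON, so the library default would apply.
    lora_config = LoraConfig(
        r=params["lora_r"],                   # 64
        lora_alpha=params["lora_alpha"],      # 16
        lora_dropout=params["lora_dropout"],  # 0.1
        task_type="CAUSAL_LM",
    )

    training_args = TrainingArguments(
        output_dir=params["project_name"],
        num_train_epochs=params["epochs"],                             # 3
        per_device_train_batch_size=params["batch_size"],              # 10
        gradient_accumulation_steps=params["gradient_accumulation"],   # 4
        learning_rate=params["lr"],                                    # 1e-4
        warmup_ratio=params["warmup_ratio"],                           # 0.1
        weight_decay=params["weight_decay"],                           # 0.01
        max_grad_norm=params["max_grad_norm"],                         # 1.0
        lr_scheduler_type=params["scheduler"],                         # "linear"
        optim=params["optimizer"],                                     # "adamw_torch"
        fp16=params["mixed_precision"] == "fp16",
        # Renamed to eval_strategy in recent transformers releases.
        evaluation_strategy=params["evaluation_strategy"],             # "epoch"
        save_strategy=params["save_strategy"],                         # "epoch"
        save_total_limit=params["save_total_limit"],                   # 1
        gradient_checkpointing=not params["disable_gradient_checkpointing"],
        seed=params["seed"],                                           # 42
    )

One consequence of these values worth noting: with batch_size 10 and gradient_accumulation 4, each optimizer step sees an effective batch of 40 sequences per device, each truncated to block_size 1024 tokens. Because "valid_split" is null, the "epoch" evaluation strategy would have no validation set to run against unless one is supplied at training time.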