---
# litgpt pretraining config: trains a pythia-160m from scratch on local text files.
model_name: pythia-160m
out_dir: out/custom-model
resume: false

# Data source: plain-text files read via litgpt.data.TextFiles.
data:
  class_path: litgpt.data.TextFiles
  init_args:
    train_data_path: train
    seed: 42
    num_workers: 4

# Training hyperparameters.
train:
  save_interval: 1000
  log_interval: 1
  # Effective batch = global_batch_size; gradient accumulation covers the
  # gap between global (512) and per-device micro batch (4).
  global_batch_size: 512
  micro_batch_size: 4
  lr_warmup_steps: 2000
  # Stop after this many tokens have been consumed.
  max_tokens: 150000000
  tie_embeddings: false
  learning_rate: 0.0004
  weight_decay: 0.1
  beta1: 0.9
  beta2: 0.95
  # Gradient-norm clipping threshold.
  max_norm: 1.0
  # Cosine-decay floor for the learning rate.
  min_lr: 4.0e-05

# Validation cadence.
eval:
  interval: 1000
  max_iters: 100

# 'auto' lets the framework pick the available accelerator count.
devices: auto
tokenizer_dir: checkpoints/EleutherAI/pythia-160m
logger_name: tensorboard
seed: 42