12B.yml
{"pipe-parallel-size": 1, "model-parallel-size": 4, "num-layers": 36, "hidden-size": 5120, "num-attention-heads": 40, "seq-length": 2048, "max-position-embeddings": 2048, "norm": "layernorm", "pos-emb": "rotary", "rotary_pct": 0.25, "no-weight-tying": true, "gpt_j_residual": true, "output_layer_parallelism": "column", "attention-config": [[["flash"], 36]], "scaled-upper-triang-masked-softmax-fusion": true, "bias-gelu-fusion": true, "optimizer": {"type": "Adam", "params": {"lr": 0.00012, "betas": [0.9, 0.95], "eps": 1e-08}}, "min_lr": 1.2e-05, "zero_optimization": {"stage": 1, "allgather_partitions": true, "allgather_bucket_size": 1260000000, "overlap_comm": true, "reduce_scatter": true, "reduce_bucket_size": 1260000000, "contiguous_gradients": true, "cpu_offload": false}, "train_micro_batch_size_per_gpu": 8, "gradient_accumulation_steps": 2, "data-impl": "mmap", "checkpoint-activations": true, "checkpoint-num-layers": 1, "partition-activations": true, "synchronize-each-layer": true, "gradient_clipping": 1.0, "weight-decay": 0.1, "hidden-dropout": 0, "attention-dropout": 0, "fp16": {"fp16": true, "enabled": true, "loss_scale": 0, "loss_scale_window": 1000, "initial_scale_power": 12, "hysteresis": 2, "min_loss_scale": 1}, "train-iters": 143000, "lr-decay-iters": 143000, "distributed-backend": "nccl", "lr-decay-style": "cosine", "warmup": 0.01, "checkpoint-factor": 1000, "extra-save-iters": [0, 1, 2, 4, 8, 16, 32, 64, 128, 256, 512], "eval-interval": 143000, "eval-iters": 10, "save": "/fsx/hailey/pythia/new_ckpts/pythia-v2-12b", "load": "/fsx/hailey/pythia/new_ckpts/pythia-v2-12b", "log-interval": 10, "steps_per_print": 10, "wall_clock_breakdown": true, "log-grad-norm": true, "train-data-paths": ["/fsx/pile/pile_20B_tokenizer_text_document"], "valid-data-paths": ["/fsx/pile/pile_20B_tokenizer_text_document"], "test-data-paths": ["/fsx/pile/pile_20B_tokenizer_text_document"], "tokenizer_type": "HFTokenizer", "vocab-file": "/fsx/pile/20B_tokenizer.json", "wandb_init_all_ranks": false, "wandb_host": "https://api.wandb.ai", "wandb_team": "eleutherai", "wandb_project": "pythia", "wandb_group": "v2-12B", "launcher": "slurm", "deepspeed_slurm": true}