Meta-Llama-3.1-8B-6-layers / run_llama_6.sh
sanchit-gandhi's picture
Upload folder using huggingface_hub
8177cf7 verified
raw
history blame
204 Bytes
#!/usr/bin/env bash
# Initialize a truncated 6-layer variant of meta-llama/Meta-Llama-3.1-8B
# in bfloat16, save it to the current directory, and push it to the
# Hugging Face Hub (requires run_initialization.py alongside this script
# and valid Hub credentials for --push_to_hub).
set -euo pipefail  # abort on command failure, unset vars, or pipeline errors

python3 run_initialization.py \
  --model_name_or_path "meta-llama/Meta-Llama-3.1-8B" \
  --num_hidden_layers "6" \
  --output_dir "./" \
  --torch_dtype "bfloat16" \
  --push_to_hub