# Download the Llama-2-7b-hf model and tokenizer to a local directory.
import os

from transformers import AutoModelForCausalLM, AutoTokenizer

# Hugging Face repo id of the model to fetch (gated: requires an
# authenticated HF account with access granted by Meta).
model_name = "meta-llama/Llama-2-7b-hf"

# Define the local directory where the model will be saved
local_model_dir = "./llama-2-7b-hf"

# Create the directory if it doesn't exist
os.makedirs(local_model_dir, exist_ok=True)

# Download the model and tokenizer.
# NOTE: `cache_dir` only redirects the Hugging Face *cache* (hashed blob
# layout) into this directory — on its own it does NOT leave a plain,
# reloadable copy of the model there.
model = AutoModelForCausalLM.from_pretrained(model_name, cache_dir=local_model_dir)
tokenizer = AutoTokenizer.from_pretrained(model_name, cache_dir=local_model_dir)

# Write a clean, self-contained copy (config.json, weight shards,
# tokenizer files) so the model can later be loaded offline with
# `from_pretrained(local_model_dir)` — this is what "saved locally"
# actually requires.
model.save_pretrained(local_model_dir)
tokenizer.save_pretrained(local_model_dir)