# from transformers import AutoModelForCausalLM, AutoTokenizer

# Download the model snapshot with ModelScope
from modelscope import snapshot_download

# snapshot_download returns the local directory the files were downloaded to
model_dir = snapshot_download("LLM-Research/Llama-3.2-1B-Instruct", cache_dir="/home/ubuntu/jack/model_cache")
# print(model_dir)  # local path to the downloaded snapshot
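
# A minimal sketch of loading the downloaded snapshot with Hugging Face
# transformers (the import commented out at the top of this file).
# The prompt and generation settings below are illustrative assumptions.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(model_dir)
llm = AutoModelForCausalLM.from_pretrained(model_dir, torch_dtype="auto")

# Quick smoke test: tokenize a prompt and generate a short completion.
inputs = tokenizer("Hello, how are you?", return_tensors="pt")
outputs = llm.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))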