from transformers import AutoModelForCausalLM, AutoModelForMaskedLM, AutoTokenizer
# Download the model weights to a local cache directory. Requires: pip install transformers

# model_name = "uer/gpt2-chinese-cluecorpussmall"
# cach_dir = "model/uer/gpt2-chinese-cluecorpussmall"
# AutoModelForCausalLM.from_pretrained(model_name,cache_dir=cach_dir)
# AutoTokenizer.from_pretrained(model_name,cache_dir=cach_dir)
# print("下载完成")


# Pre-download google-bert/bert-base-chinese (weights + tokenizer) into a local
# cache directory so later runs can load it offline.
#
# FIX: BERT is a masked language model, not a causal (left-to-right) LM.
# Loading it via AutoModelForCausalLM falls back to BertLMHeadModel with a
# warning; AutoModelForMaskedLM is the correct auto class for this checkpoint.
model_name = "google-bert/bert-base-chinese"
cache_dir = "model/google-bert/bert-base-chinese"  # fixed typo: was "cach_dir"
AutoModelForMaskedLM.from_pretrained(model_name, cache_dir=cache_dir)
AutoTokenizer.from_pretrained(model_name, cache_dir=cache_dir)
print("下载完成")