import torch
from transformers import AutoConfig, AutoModel, AutoTokenizer, AutoModelForSequenceClassification
# Free any cached GPU memory held by a previous run.
torch.cuda.empty_cache()

# --- Online loading ---
# force_download=True ignores the local cache and re-downloads the weights on
# every run; drop it once the model is cached to avoid repeated downloads.
model = AutoModel.from_pretrained("hfl/rbt3", force_download=True)

# --- Manual download (may require a proxy to reach huggingface.co) ---
# git clone "https://huggingface.co/hfl/rbt3"
# Download only the weight files:
# git lfs clone "https://huggingface.co/hfl/rbt3" --include="*.bin"

# --- Offline loading: pass the path of the locally downloaded directory ---
model = AutoModel.from_pretrained("rbt3")
# Print the model's configuration once (the original printed it twice,
# on consecutive statements — an accidental duplicate).
print(model.config)
print("-----------------------------------------------------------------------------------------")
# The configuration can also be loaded on its own, without the weights.
config = AutoConfig.from_pretrained("hfl/rbt3")
print(config)
# The original line was a bare attribute access (a no-op); print the value so
# the flag's setting is actually visible.
print(config.output_attentions)

print("-----------------------------------------------------------------------------------------")
# --- Calling the model ---
# Tokenize one sentence into PyTorch tensors ready for the model.
sen = "弱小的我也有大梦想！"
tokenizer = AutoTokenizer.from_pretrained("hfl/rbt3")
inputs = tokenizer(sen, return_tensors="pt")
print(inputs)

print("-----------------------------------------------------------------------------------------")
# --- Calling a model WITHOUT a task head ---
# AutoModel returns the raw hidden states; one vector per input token.
model = AutoModel.from_pretrained("hfl/rbt3")
output = model(**inputs)
print(output)
print(output.last_hidden_state.shape)
print(inputs["input_ids"].size(1))

print("-----------------------------------------------------------------------------------------")
# --- Calling a model WITH a task head ---
# Sequence-classification head on top of the encoder, configured for 10 labels.
clz_model = AutoModelForSequenceClassification.from_pretrained("hfl/rbt3", num_labels=10)
print(clz_model(**inputs))
print(clz_model.config.num_labels)