"""Minimal generation demo: load a local Chinese-Alpaca-2-7B checkpoint
with Hugging Face transformers and print the answer to one prompt."""
import os

# Route Hub traffic through a mirror. This MUST be set before transformers
# is imported, because the hub client reads HF_ENDPOINT at import time.
os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"

from transformers import AutoModelForCausalLM, AutoTokenizer

# Local checkpoint directory. Raw string avoids backslash-escape pitfalls;
# defined once so tokenizer and model are guaranteed to load the same files.
MODEL_PATH = r"I:\models\AI-ModelScope\chinese-alpaca-2-7b"


def main() -> None:
    """Load tokenizer + model from MODEL_PATH and answer a fixed prompt."""
    tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
    model = AutoModelForCausalLM.from_pretrained(MODEL_PATH)
    model.eval()  # inference only: disable dropout etc.

    inputs = tokenizer("What is the capital of Australia?", return_tensors="pt")

    # Without max_new_tokens, generate() falls back to the legacy
    # max_length=20 default and truncates the reply after a few tokens.
    outputs = model.generate(**inputs, max_new_tokens=128)

    answer = tokenizer.decode(outputs[0], skip_special_tokens=True)
    print(answer)


if __name__ == "__main__":
    main()