from transformers import AutoModelForCausalLM, AutoTokenizer
import time
model_name = r"C:\Users\strong\Desktop\fsdownload\Qwen2.5-0.5B"

# Time the checkpoint load so slow disk/device transfers are visible.
start_time = time.time()
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype="auto",   # take the dtype stored in the checkpoint (e.g. bf16)
    device_map="auto",    # let accelerate place weights on available devices
)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Bug fix: start_time was previously captured but never reported.
print(f"Model and tokenizer loaded in {time.time() - start_time:.2f} s")

# Print the model configuration (hidden size, layer count, vocab size, ...).
print("Model Configuration:")
print(model.config)

# Print the full module tree: every layer and its parameters.
print("\nModel Structure:")
print(model)
