from transformers import AutoModelForCausalLM, AutoTokenizer

# Download the model and tokenizer from the Hugging Face Hub
model_name = "TinyLlama/TinyLlama-1.1B-Chat-v1.0"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Save the model and tokenizer locally
model.save_pretrained("./tinyllama")
tokenizer.save_pretrained("./tinyllama")
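Once saved, both objects can be reloaded from the local directory instead of the Hub by passing the path to from_pretrained. A minimal sketch (the prompt and generation settings are illustrative, not from the original):

from transformers import AutoModelForCausalLM, AutoTokenizer

# Reload from the local directory created above; no network access is needed
model = AutoModelForCausalLM.from_pretrained("./tinyllama")
tokenizer = AutoTokenizer.from_pretrained("./tinyllama")

# Quick sanity check: generate a short completion (example prompt is an assumption)
inputs = tokenizer("Hello, how are you?", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))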