#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
@Time     : 2025/11/7 22:32
@Author   : liujingmao
@File     : distill.py
"""


from transformers import AutoTokenizer, AutoModelForCausalLM

# Load tokenizer and model from a local "Llama-3.2" checkpoint.
# NOTE: `padding=True` is a call-time argument of tokenizer(...), not a
# from_pretrained kwarg, so it was silently ignored here — removed.
tokenizer = AutoTokenizer.from_pretrained("Llama-3.2", pad_token="[PAD]")
model = AutoModelForCausalLM.from_pretrained("Llama-3.2")

text = "示例文本"
# tokenizer(...) returns both input_ids and a correct attention_mask.
# The previous hand-rolled mask (`input_ids != pad_token_id`) breaks if a
# real token ever shares the pad token id.
encoded = tokenizer(text, return_tensors="pt", padding=True)
input_ids = encoded["input_ids"]
attention_mask = encoded["attention_mask"]
output_ids = model.generate(input_ids, max_new_tokens=256, attention_mask=attention_mask)
# generate() returns a 2-D batch tensor (batch, seq); decode the first
# sequence — passing the whole 2-D tensor to decode() was a bug.
summary = tokenizer.decode(output_ids[0], skip_special_tokens=True)
