# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "numpy",
#     "einops",
#     "torch",
#     "transformers",
#     "datasets",
#     "accelerate",
#     "timm",
# ]
# ///
try:
    from transformers import AutoModelForCausalLM, AutoTokenizer

    # load the tokenizer and the model
    # (model id inferred from the output filename used below)
    model_name = "HuggingFaceTB/SmolLM3-3B"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype="auto",
        device_map="auto",
    )

    # prepare the model input
    prompt = "Give me a brief explanation of gravity in simple terms."
    messages_think = [
        {"role": "user", "content": prompt}
    ]
    text = tokenizer.apply_chat_template(
        messages_think,
        tokenize=False,
        add_generation_prompt=True,
    )
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

    # generate the output
    generated_ids = model.generate(**model_inputs, max_new_tokens=32768)

    # decode only the newly generated tokens (skip the prompt portion)
    output_ids = generated_ids[0][len(model_inputs.input_ids[0]):]
    print(tokenizer.decode(output_ids, skip_special_tokens=True))

    with open('HuggingFaceTB_SmolLM3-3B_4.txt', 'w') as f:
        f.write('Everything was good in HuggingFaceTB_SmolLM3-3B_4.txt')
except Exception as e:
    # on failure, write the full traceback to the result file instead
    with open('HuggingFaceTB_SmolLM3-3B_4.txt', 'w') as f:
        import traceback
        traceback.print_exc(file=f)
finally:
    # upload the result file (success marker or traceback) to the Hub dataset repo
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='HuggingFaceTB_SmolLM3-3B_4.txt',
        repo_id='model-metadata/custom_code_execution_files',
        path_in_repo='HuggingFaceTB_SmolLM3-3B_4.txt',
        repo_type='dataset',
    )