tokenspace / T5 / showtokensT5.py
#!/usr/bin/env python
"""
It turns out that T5 does some odd token padding of its text input.
This program shows the padding, in human readable form
"""
from transformers import T5Tokenizer, T5EncoderModel
import torch
T="mcmonkey/google_t5-v1_1-xxl_encoderonly"
tokenizer = T5Tokenizer.from_pretrained(T)
print("loded tokenzier")
def get_tokens(word):
    # Encode the text and show both the raw ids and the decoded token strings.
    tokens = tokenizer(word, return_tensors="pt")
    input_ids = tokens.input_ids
    print("Bare input_ids:", input_ids)
    # SentencePiece tokens mark the start of a word with a leading '▁'.
    decoded_tokens = tokenizer.convert_ids_to_tokens(tokens["input_ids"][0])
    print("Tokenized input:", decoded_tokens)
# id should be an integer token id (or a list of them)
def get_token_from_id(id):
    # Map a token id back to its token string.
    decoded_tokens = tokenizer.convert_ids_to_tokens(id)
    print("Tokenized id:", decoded_tokens)
get_tokens("cat")
get_tokens("dogs and cats living together")
get_token_from_id(1712)
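# Illustrative output for get_tokens("cat") (exact ids depend on the vocabulary,
# but the appended EOS token is always id 1):
#   Bare input_ids: tensor([[<id for '▁cat'>, 1]])
#   Tokenized input: ['▁cat', '</s>']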