# torchnet/tests/spacy_test.py
# Author: milselarch — pushed to main (commit df07554)
# Demo: tokenize a sentence with torchtext's spaCy tokenizer (en_core_web_sm).
from typing import Iterable, List
from torchtext.data.utils import get_tokenizer
def main(sentence: str = "Hello, this is an example sentence.") -> List[str]:
    """Tokenize *sentence* with the spaCy English tokenizer and print the tokens.

    Requires the ``spacy`` package and the ``en_core_web_sm`` model to be
    installed (``python -m spacy download en_core_web_sm``).

    Args:
        sentence: Text to tokenize; defaults to a short demo sentence.

    Returns:
        The list of token strings produced by the tokenizer.
    """
    # Building the tokenizer loads the spaCy model — do it lazily here rather
    # than at import time so importing this module has no side effects.
    token_transform = get_tokenizer(
        'spacy', language='en_core_web_sm'
    )
    tokens = token_transform(sentence)
    print(tokens)
    return tokens


if __name__ == "__main__":
    main()