# Word tokenization demo: split a paragraph into individual tokens
# (words and punctuation) using NLTK's word_tokenize.
from nltk import word_tokenize

paragraph = " The first snow came. How beautiful it was, falling so silently all day long, all night long, on the mountains, on the meadows, on the roofs of the living, on the graves of the dead! All white save the river, that marked its course by a winding black line across the landscape; and the leafless trees, that against the leaden sky now revealed more fully the wonderful beauty and intricacies of their branches. What silence, too, came with the snow, and what seclusion! Every sound was muffled, every noise changed to something soft and musical. No more tramping hoofs, no more rattling wheels! Only the chiming of the sleigh-bells, beating as swift and merrily as the hearts of children."

# Tokenize and show the resulting token list.
tokens = word_tokenize(paragraph)
print(tokens)

# Sentence tokenization demo: split a passage into sentences at
# end-of-sentence punctuation using NLTK's sent_tokenize.
from nltk import sent_tokenize

text = " The first snow came. How beautiful it was, falling so silently all day long, all night long, on the mountains, on the meadows, on the roofs of the living, on the graves of the dead! All white save the river, that marked its course by a winding black line across the landscape; and the leafless trees, that against the leaden sky now revealed more fully the wonderful beauty and intricacies of their branches. What silence, too, came with the snow, and what seclusion! Every sound was muffled, every noise changed to something soft and musical. No more tramping hoofs, no more rattling wheels! Only the chiming of the sleigh-bells, beating as swift and merrily as the hearts of children.!"

# Tokenize and show the resulting sentence list.
sentence = sent_tokenize(text)
print(sentence)

