"""
nltk分词
"""


from nltk import word_tokenize
import jieba
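
# word_tokenize relies on NLTK's Punkt tokenizer models. If they are not
# installed yet, run the one-time download below (commented out here; newer
# NLTK releases may ask for 'punkt_tab' instead of 'punkt'):
# import nltk
# nltk.download('punkt')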


# jieba.cut returns a generator of Chinese tokens; join them with spaces so
# that NLTK's English-oriented tokenizer can treat each token as a "word".
sentence = '郭靖和哀牢山三十六剑。'
space_sentence = ' '.join(jieba.cut(sentence))
print(space_sentence)

# Tokenize the space-separated string with NLTK
tokenized_sentence = word_tokenize(space_sentence)
print(tokenized_sentence)
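# The printed list should mirror the space-separated jieba output, since the
# whitespace jieba inserted already marks the token boundaries.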

# English text can be passed to word_tokenize directly, without pre-segmentation.
sentence = 'What is your name?'
tokenized_sentence = word_tokenize(sentence)
print(tokenized_sentence)
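# Expected output: ['What', 'is', 'your', 'name', '?'] (the trailing '?'
# becomes its own token).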
