import jieba

def tokenize_zh(lines):
    """Segment Chinese text lines into space-separated tokens.

    Each input line is stripped of surrounding whitespace, tokenized with
    jieba's precise-mode cut, and re-assembled as a single string whose
    tokens are separated by spaces and terminated by a newline — a common
    preprocessing format for downstream tools (e.g. word2vec training).

    Args:
        lines: Iterable of raw text lines (str).

    Returns:
        list[str]: One space-joined, "\n"-terminated token string per
        input line. An empty input yields an empty list; a blank line
        yields just "\n".
    """
    # Single pass: the original built an intermediate token-list list and
    # then re-looped to join — a comprehension does both steps at once.
    return [" ".join(jieba.lcut(line.strip())) + "\n" for line in lines]