import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import seaborn as sns
# Matplotlib rendering config: use a CJK-capable font so Chinese labels
# display, and keep the minus sign renderable under that font.
plt.rcParams['font.sans-serif'] = ['SimHei']  # SimHei covers Chinese glyphs
plt.rcParams['axes.unicode_minus'] = False    # avoid broken '-' with CJK fonts
# 使用词云前要进行安装  pip install wordcloud
# 引入词云的模块
from wordcloud import WordCloud

# 处理中文分词的库  jieba   需要提前安装好  pip install jieba
import  jieba

# Smoke test 1: render a plain English phrase with default WordCloud
# settings and save the picture.
demo_cloud = WordCloud()
demo_cloud.generate('python and wordcloud')
demo_cloud.to_file('pic1.png')

# Smoke test 2: segment a Chinese sentence with jieba and show the tokens.
demo_text = '我要对词云进行分词'
print(jieba.lcut(demo_text))

# Load the source text for the word cloud and collapse it into one
# continuous string (jieba segments it later).
# Fix: the file is now opened with an explicit encoding and a context
# manager. The original relied on the platform default codec, which fails
# on systems whose locale encoding cannot decode this Chinese text, and is
# inconsistent with the utf-8 used for 'stopword.txt' below.
# NOTE(review): utf-8 assumed — confirm the actual encoding of the file.
with open('成熟的面对敌人.txt', 'r', encoding='utf-8') as f:
    sentence = f.read()
# Strip spaces and both newline conventions so only the words remain.
sentence = sentence.replace(' ', '')
sentence = sentence.replace('\n', '').replace('\r', '')

# Load the stop-word list, one word per line.
# Fixes vs. original: the file handle is closed via a context manager (the
# bare `for line in open(...)` leaked it), and the words go into a set so
# each membership test below is O(1) instead of O(n) over a list.
with open('stopword.txt', 'r', encoding='utf-8') as stop_file:
    stop_words = {line.rstrip('\n') for line in stop_file}

# Segment the text with jieba, drop stop words, and join the surviving
# tokens with spaces — the whitespace-delimited form WordCloud.generate
# consumes. (Replaces the quadratic `word_str += word + ' '` loop.)
word_str = ' '.join(
    word for word in jieba.lcut(sentence) if word not in stop_words
)
# Render the final word cloud and save it to disk.
mask_image = plt.imread('cloud.jpg')
# NOTE(review): WordCloud reportedly lays words out only inside the
# non-white regions of `mask` and derives the output size from the mask,
# which would make width/height inert here — confirm against wordcloud docs.
word_cloud = WordCloud(
    font_path='simhei.ttf',     # CJK-capable font; needed for Chinese tokens
    background_color='white',
    width=1000,
    height=600,
    mask=mask_image,
)
word_cloud.generate(word_str)
word_cloud.to_file('wordcloud.png')