import jieba
import wordcloud
import imageio
from snownlp import SnowNLP

try:
    # Read the mask image that defines the word-cloud shape.
    mk = imageio.imread("chinamap.png")

    # Two identically-configured word clouds: one for positive words,
    # one for negative words.
    w1 = wordcloud.WordCloud(width=1000,
                             height=700,
                             background_color='white',
                             font_path='msyh.ttc',
                             mask=mk,
                             scale=15)
    w2 = wordcloud.WordCloud(width=1000,
                             height=700,
                             background_color='white',
                             font_path='msyh.ttc',
                             mask=mk,
                             scale=15)

    # Read the source text.
    with open('wwqy.txt', 'r', encoding='utf-8') as f:
        txt = f.read()

    # Segment with jieba and drop single-character tokens
    # (mostly particles/punctuation with little sentiment signal).
    txtlist = jieba.lcut(txt)
    filtered_words = [word for word in txtlist if len(word) > 1]

    print('开始进行情感分析，请稍等...')

    # Classify each word by SnowNLP sentiment score (0 = negative, 1 = positive).
    positivelist = []
    negativelist = []

    for word in filtered_words:
        try:
            feeling = SnowNLP(word).sentiments
            # Only keep strongly polarized words; the middle band is ambiguous.
            if feeling > 0.9:
                positivelist.append(word)
            elif feeling < 0.1:
                negativelist.append(word)
        except Exception as e:
            print(f"处理词语 '{word}' 时出错: {e}")
            continue

    # WordCloud.generate() raises ValueError on an empty string, so only
    # build/save a cloud when its word list is non-empty.
    positive_string = " ".join(positivelist)
    negative_string = " ".join(negativelist)

    if positive_string:
        w1.generate(positive_string)
        # Bug fix: WordCloud has no savefig(); to_file() writes the image.
        w1.to_file("positive_wordcloud.png")
    if negative_string:
        w2.generate(negative_string)
        w2.to_file("negative_wordcloud.png")

    print('词云生成完成')

except FileNotFoundError:
    print("错误：找不到指定的文件，请检查文件路径和名称")
except Exception as e:
    print(f"发生未知错误：{e}")