# -*- coding: utf-8 -*-
from WebCrawler.MQ import Consumer
from scrapy.selector import Selector
from readability import Document

#import preprocess.fenci as fenci
import jieba.analyse

# Host of the message-queue broker the consumer connects to.
IP = "127.0.0.1"

# Global word -> occurrence-count map, accumulated across all consumed articles.
words_cloud = {}

def insert_words_cloud(words):
    """Add every word in *words* to the global `words_cloud` frequency map.

    Args:
        words: iterable of strings (e.g. tags extracted by jieba).
    """
    for w in words:
        # dict.get with a default replaces the original explicit
        # membership test + read + write (three lookups -> two).
        words_cloud[w] = words_cloud.get(w, 0) + 1

def show_top_of_words_cloud():
    """Print the (up to) 10 most frequent words in `words_cloud`, comma-separated."""
    # Rank (word, count) pairs by count, highest first.
    ranked = sorted(words_cloud.items(), key=lambda item: item[1], reverse=True)
    # Renamed from `all`, which shadowed the builtin; dead `pass` removed.
    top_words = ",".join(pair[0] for pair in ranked[:10])
    print(top_words)


if __name__ == '__main__':

    article_num = 0  # total messages processed so far
    articles = []    # raw article bodies; NOTE(review): grows without bound

    def callback(channel, method, properties, msg):
        """Handle one MQ message: extract readable text and update the word cloud.

        The four parameters follow the standard consumer-callback signature
        (channel, delivery method, message properties, message body).
        """
        global article_num
        print(" [x] Received %r" % msg[:40])

        article = str(msg, encoding = "utf-8")

        articles.append(article)
        article_num += 1
        print("%d" % article_num)

        # Parse the HTML once; the original built two Document objects
        # from the same article string.
        doc = Document(article)
        readable_article_html = doc.summary()
        readable_title = doc.short_title()  # NOTE(review): currently unused
        readable_article = Selector(text=readable_article_html).xpath("//text()").extract()
        purge_doc = "".join(readable_article)

        tags = jieba.analyse.extract_tags(purge_doc)
        insert_words_cloud(tags)
        show_top_of_words_cloud()

        # Ack so the broker knows the message was fully processed.
        channel.basic_ack(delivery_tag=method.delivery_tag)

    c = Consumer(IP)
    c.run("article", callback=callback)
