# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import hashlib
import pybloom

from scrapy import log

from xdb import get_db_cursor


class ECommerceSiteCrawlerSystemPipeline(object):
    """Scrapy item pipeline that de-duplicates crawled e-commerce items.

    An MD5 over the item's content fields is checked against a scalable
    bloom filter (preloaded from the DB in ``open_spider``); unseen items
    are buffered and batch-upserted into the spider's MySQL table with
    ``INSERT ... ON DUPLICATE KEY UPDATE``.
    """

    def __init__(self):
        # Rows pending a batched write to the DB.
        self.item_lst = []
        # Total rows written, reported in close_spider.
        self.counter = 0
        # Flush the pending batch once it reaches this many rows.
        self.item_lst_len = 2000
        # MD5s of item content already stored; a hit means the item is
        # unchanged and can be skipped.
        self.bloom_filter = pybloom.ScalableBloomFilter()

    def open_spider(self, spider):
        """Preload the bloom filter with MD5s already stored for this spider's table."""
        # NOTE(review): table_name is string-interpolated because SQL
        # identifiers cannot be bound as parameters; it must come from
        # trusted spider configuration, never from crawled data.
        sql = "select md5 from %s" % spider.table_name
        with get_db_cursor("downline", "e_commerce_site_crawler_system") as cursor:
            cursor.execute(sql)
            for md5, in cursor:
                self.bloom_filter.add(md5)

    def process_item(self, item, spider):
        """Queue *item* for upsert unless its content MD5 is already known.

        Returns the item so downstream pipelines keep receiving it
        (bug fix: the original implicitly returned None, which breaks
        Scrapy pipeline chaining).
        """
        all_content_lst = [
            item["url"], item["origin_url"],
            str(item["on_shelves"]), item["title"],
            str(item["price"]), item["attr"], str(item["comment_num"]),
            str(item["good_comment_num"]),
            str(item["medium_comment_num"]), str(item["bad_comment_num"])
        ]
        # Normalize None fields so the join below cannot fail.
        all_content_lst = [x if x is not None else "None" for x in all_content_lst]
        # Encode before hashing: hashlib requires bytes for non-ASCII
        # (unicode) content, and str input fails outright on Python 3.
        md5 = hashlib.md5("".join(all_content_lst).encode("utf-8")).hexdigest()
        if md5 not in self.bloom_filter:
            # Bug fix: record the MD5 immediately so an identical item seen
            # later in the SAME crawl is not queued twice — the original
            # only ever loaded MD5s from the DB at startup.
            self.bloom_filter.add(md5)
            self.item_lst.append(
                (
                    item["url"], item["origin_url"], md5,
                    item["on_shelves"], item["title"],
                    item["price"], item["attr"], item["comment_num"],
                    item["good_comment_num"], item["medium_comment_num"], item["bad_comment_num"],
                    # The values repeat for the ON DUPLICATE KEY UPDATE clause.
                    item["origin_url"], md5,
                    item["on_shelves"], item["title"],
                    item["price"], item["attr"], item["comment_num"],
                    item["good_comment_num"], item["medium_comment_num"], item["bad_comment_num"],
                )
            )

        # Bug fix: flush at exactly item_lst_len rows (was `>`, which let
        # the batch grow one row past the configured limit).
        if len(self.item_lst) >= self.item_lst_len:
            self.update_info_into_db(self.item_lst, spider)
            self.item_lst = []
        return item

    def update_info_into_db(self, item_lst, spider):
        """Upsert every queued row into the spider's table.

        Each 21-value tuple feeds the 11 INSERT placeholders plus the
        10 ON DUPLICATE KEY UPDATE placeholders.
        """
        if not item_lst:
            return
        with get_db_cursor("downline", "e_commerce_site_crawler_system") as cursor:
            # %%s survives the table-name interpolation as the driver's %s
            # bind placeholder, so values are parameterized, not formatted.
            sql = """insert into %s (url, origin_url, md5, on_shelves, title,
            price, attr, comment_num, good_comment_num,
            medium_comment_num, bad_comment_num) values (%%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s, %%s)
            on duplicate key update origin_url=%%s, md5=%%s, on_shelves=%%s, title=%%s, price=%%s,
            attr=%%s, comment_num=%%s, good_comment_num=%%s,
            medium_comment_num=%%s, bad_comment_num=%%s""" % spider.table_name
            for item in item_lst:
                cursor.execute(sql, item)
                self.counter += 1

    def close_spider(self, spider):
        """Flush any remaining buffered rows and log the final write count."""
        self.update_info_into_db(self.item_lst, spider)
        self.item_lst = []
        log.msg("done, update or insert %s item info of %s!" % (self.counter, spider.table_name), level=log.INFO)
