# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
from utils.baidu import Baidu
from utils.qq import QQ
from utils.weibo import Weibo


class BaiduCheckEmptyPipeline(object):
    """Drop Baidu items whose required fields are empty or missing.

    Uses ``item.get(field)`` rather than ``item[field]`` so a field that is
    absent from the item is treated the same as an empty one and the item is
    dropped, instead of crashing the pipeline with a ``KeyError``.
    """

    # Fields that must be present and non-empty for the item to pass through.
    REQUIRED_FIELDS = ("title", "link")

    def process_item(self, item, spider):
        """Return *item* unchanged, or raise ``DropItem`` if a field is empty.

        Raises:
            DropItem: when any field in ``REQUIRED_FIELDS`` is empty/missing.
        """
        for field in self.REQUIRED_FIELDS:
            if not item.get(field):
                # Build the original message text, e.g. "Title is empty, DROPPED!!!"
                label = field.replace("_", " ").capitalize()
                raise DropItem("%s is empty, DROPPED!!!" % label)

        return item


class BaiduSavePipeline(object):
    """Persist Baidu items, refreshing records whose link was already crawled."""

    def process_item(self, item, spider):
        """Insert *item*, or update the existing record and drop the duplicate.

        Raises:
            DropItem: when the link was crawled before; the stored record is
                updated with the new data instead of inserting a duplicate.
        """
        model = Baidu()

        # is_link_crawled presumably returns the existing record's id (falsy
        # when the link is new) — it is passed straight into update() below.
        # Renamed from `id` to avoid shadowing the builtin.
        record_id = model.is_link_crawled(item["link"])
        if record_id:
            model.update(record_id, item)
            raise DropItem("Link had crawled, update info and DROPPED!!!")

        model.add(item)

        return item


class QQCheckEmptyPipeline(object):
    """Drop QQ items whose required fields are empty or missing.

    Uses ``item.get(field)`` rather than ``item[field]`` so a field that is
    absent from the item is treated the same as an empty one and the item is
    dropped, instead of crashing the pipeline with a ``KeyError``.
    """

    # Fields that must be present and non-empty for the item to pass through.
    REQUIRED_FIELDS = ("title", "link", "preview_images")

    def process_item(self, item, spider):
        """Return *item* unchanged, or raise ``DropItem`` if a field is empty.

        Raises:
            DropItem: when any field in ``REQUIRED_FIELDS`` is empty/missing.
        """
        for field in self.REQUIRED_FIELDS:
            if not item.get(field):
                # Build the original message text, e.g.
                # "Preview images is empty, DROPPED!!!"
                label = field.replace("_", " ").capitalize()
                raise DropItem("%s is empty, DROPPED!!!" % label)

        return item


class QQSavePipeline(object):
    """Persist QQ items, refreshing records whose link was already crawled."""

    def process_item(self, item, spider):
        """Insert *item*, or update the existing record and drop the duplicate.

        Raises:
            DropItem: when the link was crawled before; the stored record is
                updated with the new data instead of inserting a duplicate.
        """
        model = QQ()

        # is_link_crawled presumably returns the existing record's id (falsy
        # when the link is new) — it is passed straight into update() below.
        # Renamed from `id` to avoid shadowing the builtin.
        record_id = model.is_link_crawled(item["link"])
        if record_id:
            model.update(record_id, item)
            raise DropItem("Link had crawled, update info and DROPPED!!!")

        model.add(item)

        return item


class WeiboCheckEmptyPipeline(object):
    """Drop Weibo items whose required fields are empty or missing.

    Uses ``item.get(field)`` rather than ``item[field]`` so a field that is
    absent from the item is treated the same as an empty one and the item is
    dropped, instead of crashing the pipeline with a ``KeyError``.
    """

    # Fields that must be present and non-empty for the item to pass through.
    REQUIRED_FIELDS = ("title", "link", "preview_images", "author_avatar")

    def process_item(self, item, spider):
        """Return *item* unchanged, or raise ``DropItem`` if a field is empty.

        Raises:
            DropItem: when any field in ``REQUIRED_FIELDS`` is empty/missing.
        """
        for field in self.REQUIRED_FIELDS:
            if not item.get(field):
                # Build the original message text, e.g.
                # "Author avatar is empty, DROPPED!!!"
                label = field.replace("_", " ").capitalize()
                raise DropItem("%s is empty, DROPPED!!!" % label)

        return item


class WeiboSavePipeline(object):
    """Persist Weibo items, refreshing records whose link was already crawled."""

    def process_item(self, item, spider):
        """Insert *item*, or update the existing record and drop the duplicate.

        Raises:
            DropItem: when the link was crawled before; the stored record is
                updated with the new data instead of inserting a duplicate.
        """
        model = Weibo()

        # is_link_crawled presumably returns the existing record's id (falsy
        # when the link is new) — it is passed straight into update() below.
        # Renamed from `id` to avoid shadowing the builtin.
        record_id = model.is_link_crawled(item["link"])
        if record_id:
            model.update(record_id, item)
            raise DropItem("Link had crawled, update info and DROPPED!!!")

        model.add(item)

        return item
