# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


import codecs
# useful for handling different item types with a single interface
import json
from scrapy import Request
from scrapy.exceptions import DropItem
from scrapy.exporters import JsonItemExporter
from scrapy.pipelines.images import ImagesPipeline
from db.MysqlDB import MysqlDb
from db.TwistedDBHelper import TwistedDBHelper


class BlogsPipeline:
    """Default no-op pipeline: hands every item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to do here; pass the item to the next pipeline stage.
        return item


"""
同步MySql数据库存储数据
"""


class MysqlPipeline(object):
    """Stores scraped items in MySQL over a single synchronous connection."""

    def __init__(self, db):
        # db: MysqlDb helper exposing execute_db(sql) -- presumably executes
        # and commits one statement; verify against db.MysqlDB.
        self.db = db

    @classmethod
    def from_settings(cls, settings):
        """Build the pipeline from Scrapy settings (one shared connection)."""
        db = MysqlDb(settings["MYSQL_HOST"],
                     settings["MYSQL_PORT"],
                     settings["MYSQL_USER"],
                     settings["MYSQL_PASSWD"],
                     settings["MYSQL_DBNAME"])
        return cls(db)

    @staticmethod
    def _escape(value):
        """Escape backslashes and single quotes so scraped (untrusted) text
        cannot break out of the SQL string literal.

        NOTE(review): the original interpolated raw item fields into the SQL
        (injection + breaks on any title containing a quote). A parameterized
        query via the driver would be safer still -- use it if MysqlDb ever
        exposes one.
        """
        return str(value).replace("\\", "\\\\").replace("'", "\\'")

    def process_item(self, item, spider):
        """Insert the item's title and image URL into blogs_new."""
        insert_sql = (
            "insert into blogs_new(title,img_url) "
            f"VALUES ('{self._escape(item['title'])}', '{self._escape(item['img_urls'])}')"
        )
        print(insert_sql)
        self.db.execute_db(insert_sql)
        return item


"""
异步Mysql写入数据
"""


class MySqlTwistedPipeline(object):
    """Writes items to MySQL asynchronously via a Twisted DB helper."""

    def __init__(self):
        # Helper that performs inserts on Twisted's thread pool,
        # so the crawl is not blocked by database latency.
        self.db = TwistedDBHelper()

    def process_item(self, item, spider):
        """Queue the item for an asynchronous insert, then pass it on."""
        self.db.insert(item)
        return item


"""
自定义JSON文件保存导出
"""


class JsonWithEncodingPipeline(object):
    """Appends each item to article.json as one JSON object per line."""

    def __init__(self):
        # File write mode: 'w' overwrites, 'a' appends across runs.
        self.file = codecs.open('article.json', 'a', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize the item (keeping non-ASCII characters readable via
        ensure_ascii=False) and write it as a single line."""
        line = json.dumps(dict(item), ensure_ascii=False) + "\n"
        self.file.write(line)
        return item

    def close_spider(self, spider):
        """Scrapy hook: close the file when the spider finishes.

        BUG FIX: the hook must be named close_spider -- the original
        spider_close was never invoked by Scrapy, leaking the handle.
        """
        self.file.close()

    # Backward-compatible alias for any code that called the old name.
    spider_close = close_spider


"""
scrapy提供的官方Exporter导出
"""


class JsonExporterPipeline(object):
    """Exports items to article-exporter.json via Scrapy's JsonItemExporter."""

    def __init__(self):
        # JsonItemExporter requires a binary file handle.
        self.file = open('article-exporter.json', 'wb')
        self.exporter = JsonItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
        self.exporter.start_exporting()

    def process_item(self, item, spider):
        """Hand the item to the exporter and pass it down the pipeline."""
        self.exporter.export_item(item)
        return item

    def close_spider(self, spider):
        """Scrapy hook: finish the export and close the file.

        BUG FIX: renamed from spider_close, which Scrapy never calls --
        so finish_exporting() never ran, the JSON array was never
        terminated, and the file handle leaked.
        """
        self.exporter.finish_exporting()
        self.file.close()

    # Backward-compatible alias for any code that called the old name.
    spider_close = close_spider


"""
图片下载类(需要安装 pillow )
"""


class ImgPipeline(ImagesPipeline):
    """Downloads the image referenced by each item's img_urls field."""

    def get_media_requests(self, item, info):
        """Schedule the download of the item's image URL, carrying the
        item along in the request meta."""
        # assumes item['img_urls'] is a single URL string -- TODO confirm
        # against the spider (MysqlPipeline also treats it as a scalar).
        yield Request(url=item['img_urls'], meta={'item': item})

    def file_path(self, request, response=None, info=None, *, item=None):
        """Store each image under the last path segment of its URL."""
        return request.url.rsplit('/', 1)[-1]

    def item_completed(self, results, item, info):
        """Drop the item when no image was downloaded successfully."""
        downloaded = [data['path'] for ok, data in results if ok]
        if not downloaded:
            raise DropItem('Image Downloaded Failed')
        return item
