# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.pipelines.images import ImagesPipeline
from scrapy.exporters import JsonItemExporter
import codecs
import json
import MySQLdb
import MySQLdb.cursors
from twisted.enterprise import adbapi


class BolezaixianPipeline(object):
    """Default no-op pipeline: hands every item through unchanged."""

    def process_item(self, item, spider):
        # Returning the item keeps the pipeline chain alive for later stages.
        return item


class BoleImagesPipelin(ImagesPipeline):
    """Image pipeline that records the locally stored image path on the item.

    NOTE(review): the class name keeps the original (misspelled) identifier
    because ITEM_PIPELINES in settings refers to it by this exact name.
    """

    def item_completed(self, results, item, info):
        """Copy the downloaded image path into ``item['front_img_path']``.

        ``results`` is a list of ``(ok, value)`` tuples; ``value`` is a dict
        with a ``'path'`` key only when ``ok`` is True (on failure it is a
        Failure object). The original code raised NameError on an empty
        result list and TypeError on a failed download; both are handled now.
        """
        image_paths = [value['path'] for ok, value in results if ok]
        # Preserve the original "last result wins" behaviour; fall back to ''
        # so the item still serialises/inserts cleanly when no image was saved.
        item['front_img_path'] = image_paths[-1] if image_paths else ''
        return item


class JsonExporterPipeline(object):
    """Export every item to 'expro.json' via scrapy's JsonItemExporter."""

    def __init__(self):
        # The exporter requires a binary file handle; UTF-8 output with
        # non-ASCII characters kept as-is.
        self.file = open('expro.json', 'wb')
        self.exporter = JsonItemExporter(self.file, encoding="utf-8", ensure_ascii=False)
        self.exporter.start_exporting()

    def process_item(self, item, spider):
        """Serialize the item, then pass it on down the pipeline."""
        self.exporter.export_item(item)
        return item

    def close_spider(self, spider):
        """Finish the JSON document and close the underlying file."""
        self.exporter.finish_exporting()
        self.file.close()


class MysqlPipeline(object):
    """Synchronous MySQL storage pipeline (blocks per item; see the twisted
    variant below for the asynchronous version)."""

    def __init__(self):
        # NOTE(review): credentials are hard-coded; consider reading them from
        # settings the way MysqlTwistedPipeline.from_settings does.
        self.coon = MySQLdb.connect('127.0.0.1', 'root', 'mysql', 'bolezaixian', charset='utf8', use_unicode=True)
        self.cursor = self.coon.cursor()

    def process_item(self, item, spider):
        """Insert the item into ``bolewenzhang`` and pass the item on.

        Fixes the original bug of not returning the item, which made every
        pipeline configured after this one receive ``None``.
        """
        insert_sql = """
            insert into bolewenzhang(title, crat_time, url, url_object_id, front_img_url, front_img_path, comment_nums,
             collection_nums, tags, content) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        """
        try:
            self.cursor.execute(insert_sql, (item['title'], item['crat_time'], item['url'], item['url_object_id'],
                                             item['front_img_url'], item['front_img_path'], item['comment_nums'],
                                             item['collection_nums'], item['tags'], item['content']))
            self.coon.commit()
        except Exception:
            # Undo the partial transaction so one bad item cannot wedge the
            # connection, then re-raise so scrapy logs the failure.
            self.coon.rollback()
            raise
        return item

    def close_spider(self, spider):
        """Release the cursor and the connection when the spider finishes."""
        self.cursor.close()
        self.coon.close()


class MysqlTwistedPipeline(object):
    """Asynchronous MySQL pipeline using twisted's adbapi connection pool.

    Scrapy extracts items faster than a synchronous insert can store them,
    so the insert is run on adbapi's thread pool instead of blocking.
    """

    def __init__(self, dbpool):
        # Twisted only provides the async container; the actual driver is
        # whatever adbapi.ConnectionPool was configured with.
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        """Build the pipeline from the MYSQL_* entries in scrapy settings."""
        dbparms = dict(
            host=settings['MYSQL_HOST'],
            db=settings['MYSQL_DBNAME'],
            user=settings['MYSQL_USER'],
            password=settings['MYSQL_PASSWORD'],
            charset='utf8',
            cursorclass=MySQLdb.cursors.DictCursor,
            use_unicode=True
        )
        # Hook up the async container to the MySQLdb driver.
        dbpool = adbapi.ConnectionPool('MySQLdb', **dbparms)
        return cls(dbpool)

    def process_item(self, item, spider):
        """Schedule an asynchronous insert, then pass the item on immediately.

        Fixes the original bug of returning ``None``, which starved every
        pipeline configured after this one.
        """
        query = self.dbpool.runInteraction(self.do_insert, item)
        # Forward item/spider so the errback can report useful context.
        query.addErrback(self.handle_error, item, spider)
        return item

    def handle_error(self, failure, item=None, spider=None):
        """Report a failure raised by the asynchronous insert.

        ``item``/``spider`` default to None to stay backward-compatible with
        callers that pass only the failure.
        """
        print(failure)

    def do_insert(self, cursor, item):
        """Run the actual INSERT inside an adbapi transaction."""
        insert_sql = """
                    insert into bolewenzhang(title, crat_time, url, url_object_id, front_img_url, front_img_path, comment_nums,
                     collection_nums, tags, content) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
                """
        cursor.execute(insert_sql, (item['title'], item['crat_time'], item['url'], item['url_object_id'],
                                    item['front_img_url'], item['front_img_path'], item['comment_nums'],
                                    item['collection_nums'], item['tags'], item['content']))