# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
# from itemadapter import ItemAdapter
from scrapy.pipelines.images import ImagesPipeline
from twisted.enterprise import adbapi
import MySQLdb


class CnblogsspiderPipeline:
    """Default no-op pipeline: forwards every item to the next stage unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; just hand the item down the pipeline chain.
        return item


class ArticleImagePipeline(ImagesPipeline):
    """Image pipeline that records the local path of the downloaded cover image.

    Extends Scrapy's ImagesPipeline so that, once downloads complete, the
    item carries a ``front_image_path`` field pointing at the stored file.
    """

    def item_completed(self, results, item, info):
        """Store the downloaded image path on the item and pass it on.

        ``results`` is a list of ``(success, value)`` tuples; on success
        ``value`` is a dict with a ``'path'`` key, on failure it is a Twisted
        Failure object.
        """
        if 'front_image_url' in item:
            image_file_path = ''
            for ok, value in results:
                # BUG FIX: the original indexed value['path'] unconditionally;
                # for a failed download `value` is a Failure, which raised.
                if ok:
                    image_file_path = value['path']
            item['front_image_path'] = image_file_path
        return item

class MysqlPipeline(object):
    """Synchronous MySQL insertion pipeline.

    Each item is written with a blocking INSERT + commit, which stalls the
    Scrapy reactor per item; fine for small crawls, use the Twisted variant
    for throughput.
    """

    def __init__(self):
        # NOTE(review): connection parameters are hard-coded; consider reading
        # them from Scrapy settings like MysqlTwistedPipeline does.
        self.conn = MySQLdb.connect('127.0.0.1', 'root', 'root', 'article_spider', charset='utf8', use_unicode=True)
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one item synchronously and return it for later stages."""
        insert_sql = """
            insert into cnblogs_spider(title, url, url_object_id,front_image_url, front_image_path,praise_nums, fav_nums, tags,content,create_date,comment_nums )
            values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
        """
        # BUG FIX: the SQL lists 11 columns/placeholders, but the original code
        # appended only 10 values -- item['tags'] was missing between fav_nums
        # and content, so every execute failed with a parameter-count error.
        params = (
            item['title'],
            item['url'],
            item['url_object_id'],
            item['front_image_url'],
            item['front_image_path'],
            item['praise_nums'],
            item['fav_nums'],
            item['tags'],
            item['content'],
            item['create_date'],
            item['comment_nums'],
        )
        self.cursor.execute(insert_sql, params)
        self.conn.commit()
        # BUG FIX: pipelines must return the item so later stages receive it;
        # the original implicitly returned None.
        return item


class MysqlTwistedPipeline(object):
    """Asynchronous MySQL pipeline backed by Twisted's adbapi connection pool.

    Inserts run on adbapi's thread pool so they never block the reactor.
    """

    def __init__(self, dbpool):
        # dbpool: a twisted.enterprise.adbapi.ConnectionPool
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        """Build the pipeline from Scrapy settings.

        Reads MYSQL_HOST, MYSQL_NAME, MYSQL_USER and MYSQL_PASSWORD.
        """
        from MySQLdb.cursors import DictCursor
        dbparms = dict(
            host=settings['MYSQL_HOST'],
            db=settings['MYSQL_NAME'],
            user=settings['MYSQL_USER'],
            passwd=settings['MYSQL_PASSWORD'],
            charset='utf8',
            cursorclass=DictCursor,
            use_unicode=True,
        )
        dbpool = adbapi.ConnectionPool('MySQLdb', **dbparms)
        return cls(dbpool)

    def process_item(self, item, spider):
        """Schedule an asynchronous insert and return the item for later stages."""
        query = self.dbpool.runInteraction(self.do_insert, item)
        query.addErrback(self.handle_error, item, spider)
        # BUG FIX: a pipeline must return the item (or a Deferred yielding it);
        # the original returned None, starving all downstream pipeline stages.
        return item

    def handle_error(self, failure, item, spider):
        # Errback for failed inserts; surfaces the Twisted Failure.
        # NOTE(review): consider spider.logger.error(...) instead of print.
        print(failure)

    def do_insert(self, cursors, item):
        """Run inside the adbapi thread pool; adbapi commits the transaction."""
        insert_sql = """
                    insert into cnblogs_spider(title, url, url_object_id,front_image_url, front_image_path,praise_nums, fav_nums, tags,content,create_date,comment_nums )
                    values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ON DUPLICATE KEY UPDATE praise_nums=VALUES(praise_nums) 
                """
        # The item may carry several image URLs; store them comma-joined.
        front_image_url = ','.join(item.get('front_image_url', []))
        params = (
            item['title'],
            item['url'],
            item['url_object_id'],
            front_image_url,
            item['front_image_path'],
            item['praise_nums'],
            item['fav_nums'],
            item['tags'],
            item['content'],
            item['create_date'],
            item['comment_nums'],
        )
        cursors.execute(insert_sql, params)



