# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

from twisted.enterprise import adbapi
from itemadapter import ItemAdapter

class APipline(object):
    """Pipeline that adds ``nofollow`` to outbound-link rel attributes.

    Rewrites every ``rel="noopener"`` occurrence in the scraped article
    HTML to ``rel="noopener nofollow"`` before the item moves on to the
    next pipeline.
    """

    def process_item(self, item, spider):
        """Rewrite item['content'] in place and return the item.

        Assumes ``item['content']`` is an HTML string — confirm against
        the spider that populates it.
        """
        adapter = ItemAdapter(item)
        # Fix: the rel token was misspelled "nofllow"; browsers/search
        # engines only honor the correctly spelled "nofollow".
        adapter['content'] = adapter['content'].replace(
            'rel="noopener"', 'rel="noopener nofollow"'
        )
        # Fix: return the item, not the ItemAdapter wrapper. ItemAdapter
        # mutates the underlying item, and downstream pipelines expect to
        # receive the item object itself.
        return item

class DbPipline(object):
    """Persist scraped articles to MySQL asynchronously via Twisted adbapi.

    Connection parameters are read from the spider settings:
    MYSQL_HOST, MYSQL_PORT, MYSQL_DATABASE, MYSQL_USER, MYSQL_PASSWORD.
    """

    def open_spider(self, spider):
        """Create the pymysql connection pool when the spider starts."""
        adbparams = dict(
            host=spider.settings.get('MYSQL_HOST'),
            port=spider.settings.get('MYSQL_PORT'),
            database=spider.settings.get('MYSQL_DATABASE'),
            user=spider.settings.get('MYSQL_USER'),
            password=spider.settings.get('MYSQL_PASSWORD'),
            charset='utf8mb4'
        )
        self.dbpool = adbapi.ConnectionPool('pymysql', **adbparams)

    def process_item(self, item, spider):
        """Schedule an asynchronous insert and pass the item downstream."""
        deferred = self.dbpool.runInteraction(self.insert_db, item)
        # Fix: attach the (previously unused) error handler so DB failures
        # are reported instead of being silently swallowed by the Deferred.
        deferred.addErrback(self.handle_error, item, spider)
        # Fix: return the item so later pipelines still receive it;
        # returning None drops the item from the pipeline chain.
        return item

    def handle_error(self, failure, item, spider):
        # Errback for runInteraction: report the failure and the item
        # that could not be saved.
        print("错误", failure, item)

    def close_spider(self, spider):
        """Shut down the connection pool when the spider closes."""
        self.dbpool.close()

    @staticmethod
    def insert_db(tx, item):
        """Insert one article row; runs in a pool thread via runInteraction.

        ``tx`` is the DB-API cursor supplied by runInteraction; the
        transaction is committed by adbapi when this function returns.
        """
        # Tags are stored as a single pipe-delimited string column.
        tags = '|'.join(item['tags'])
        values = (
            item['title'],
            item['author'],
            item['desc'],
            item['content'],
            item['url'],
            tags,
            item['created_at']
        )
        # Parameterized query: the driver binds the values, so scraped
        # content cannot inject SQL.
        sql = 'INSERT INTO wl_articles (title,author,remark, content,' \
              'source_url, tags, created_at) VALUES (%s,%s,%s,%s,%s,%s,%s)'
        tx.execute(sql, values)
        print("数据保存成功！", item['title'], item['url'])
