# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import mysql.connector
from mysql.connector.cursor import MySQLCursor
from twisted.enterprise import adbapi

class LywPipeline(object):
    """Scrapy item pipeline that persists crawled items into the MySQL
    table ``lieyun`` through a Twisted adbapi connection pool, so inserts
    run on a thread pool instead of blocking the reactor.

    Expects a ``MYSQL_DB_CONFIG`` dict in the crawler settings with keys:
    DRIVER, HOST, USER, PASSWORD, DATABASE, AUTH_PLUGIN.
    """

    def __init__(self, mysql_config):
        # Pipeline initialization: build the asynchronous connection pool.
        print('流水线初始化方法.....')
        self.db_pool = adbapi.ConnectionPool(mysql_config['DRIVER'],
                                             host=mysql_config['HOST'],
                                             user=mysql_config['USER'],
                                             password=mysql_config['PASSWORD'],
                                             database=mysql_config['DATABASE'],
                                             charset='utf8',
                                             auth_plugin=mysql_config['AUTH_PLUGIN'])

    @classmethod
    def from_crawler(cls, crawler):
        # Alternate constructor used by Scrapy: pull the DB config from settings.
        print('流水线静态方法.....')
        mysql_config = crawler.settings["MYSQL_DB_CONFIG"]
        return cls(mysql_config)

    def insert_item(self, cursor, item):
        # Runs inside the adbapi interaction thread: insert one item.
        # Parameterized query — never interpolate scraped (untrusted) values.
        insert_sql = "INSERT INTO lieyun (title, time, content,url) VALUES (%s, %s, %s,%s)"
        data = (item["title"], item["time"], item["content"], item["url"])
        cursor.execute(insert_sql, data)

    def _handle_error(self, failure, item, spider):
        # Errback for the insert Deferred: surface DB errors instead of
        # letting them vanish as "Unhandled error in Deferred".
        print('数据库插入失败: %s' % failure)
        return None  # swallow after logging so the crawl keeps going

    def process_item(self, item, spider):
        # Schedule the insert on the pool's thread and attach an errback
        # so failures are logged rather than silently dropped.
        deferred = self.db_pool.runInteraction(self.insert_item, item)
        deferred.addErrback(self._handle_error, item, spider)
        # Scrapy contract: return the item so later pipelines / exporters
        # still receive it (the original returned None, dropping every item).
        return item

    def close_spider(self, spider):
        # Release all pooled DB connections when the spider finishes.
        self.db_pool.close()
