# Pipeline overview:
# 1. Import adbapi
# 2. Create the database connection pool
# 3. Run the database insert operations asynchronously
# 4. Catch and report errors

from pymysql import cursors
from twisted.enterprise import adbapi
# Pipeline class implementing asynchronous inserts.


class BDuPipeline:
    """Scrapy item pipeline that inserts crawled items into MySQL asynchronously.

    Uses Twisted's ``adbapi.ConnectionPool`` so database writes run in a
    thread pool and do not block the crawler's event loop.
    """

    def __init__(self, dbpool):
        # adbapi.ConnectionPool shared by every insert this pipeline schedules.
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        """Build the pipeline from Scrapy project settings.

        Called automatically by Scrapy on startup; reads the connection
        parameters from settings and creates the connection pool.
        """
        db_params = dict(
            host=settings['HOST'],
            user=settings['USER'],
            password=settings['PASSWD'],
            port=settings['PORT'],
            db=settings['DB'],
            charset=settings['CHARSET'],
            # Return rows as dicts instead of tuples.
            cursorclass=cursors.DictCursor
        )

        # ConnectionPool('pymysql', ...) manages multiple pymysql connections;
        # interactions run in worker threads and results arrive via Deferreds.
        db_connect_pool = adbapi.ConnectionPool('pymysql', **db_params)
        return cls(db_connect_pool)

    def process_item(self, item, spider):
        """Schedule the item for asynchronous insertion and pass it along."""
        query = self.dbpool.runInteraction(self.insert_into, item)
        # If the SQL fails, the Deferred fires its errback chain.
        query.addErrback(self.handel_error)
        return item

    def insert_into(self, cursor, item):
        """Insert one item into the ``baidu_spider`` table.

        Runs inside a pool worker thread with a transaction-bound cursor.
        Uses a parameterized query so the driver escapes all values — this
        prevents SQL injection and stores text verbatim (the previous
        string-formatted SQL was injectable and also mangled single quotes
        into double quotes via ``.replace``).
        """
        insert_sql = (
            "INSERT INTO baidu_spider(title,source,datetime,keyword,content) "
            "values (%s,%s,%s,%s,%s);"
        )
        cursor.execute(insert_sql, (
            item.get('title', ''),
            item.get('source', ''),
            str(item.get('datetime', '')),
            item.get('keyword', ''),
            item.get('content', ''),
        ))

    def handel_error(self, failure):
        """Print the Twisted Failure from a rejected insert.

        NOTE: the name keeps the original spelling ("handel") for backward
        compatibility with any external references.
        """
        print(failure)
