# -*- coding: utf-8 -*-
# @Time : 2021/10/24 14:23
# @Author : 唐宁
# @Site : 
# @File : TwistedDBHelper.py
# @Software: PyCharm
import sys

import pymysql
from scrapy.utils.project import get_project_settings
from twisted.enterprise import adbapi


def _handle_error(failure):
    print(failure)


class TwistedDBHelper(object):
    """Asynchronous MySQL writer backed by a Twisted adbapi connection pool.

    Connection parameters are read from the Scrapy project settings:
    MYSQL_HOST, MYSQL_DBNAME, MYSQL_USER, MYSQL_PASSWD.
    """

    def __init__(self):
        # Pull the connection settings from the Scrapy project configuration.
        settings = get_project_settings()
        db_params = dict(
            host=settings['MYSQL_HOST'],
            db=settings['MYSQL_DBNAME'],
            user=settings['MYSQL_USER'],
            passwd=settings['MYSQL_PASSWD'],
            charset='utf8',  # avoid mojibake for Chinese text
            cursorclass=pymysql.cursors.DictCursor,
            # use_unicode=False would make pymysql return raw bytes from
            # queries, defeating the charset setting above -- decode to str.
            use_unicode=True,
        )
        # ** expands the dict into keyword args (host=..., db=..., ...).
        # cp_reconnect lets the pool transparently reopen dropped connections.
        self.__dbpool = adbapi.ConnectionPool('pymysql', cp_reconnect=True,
                                              **db_params)

    def connect(self):
        """Return the underlying adbapi ConnectionPool."""
        return self.__dbpool

    def insert(self, item):
        """Schedule an asynchronous insert of *item* and return it unchanged.

        The statement bumps ``duplicate_num`` instead of inserting when the
        row's unique key already exists. Errors are logged by the errback
        rather than raised, so the crawl keeps running.
        """
        # Columns written: title, img_url, union_code.
        sql = "insert into blogs_new(title,img_url,union_code) VALUES (%s,%s,%s) " \
              "ON DUPLICATE KEY UPDATE duplicate_num = duplicate_num + 1 "
        query = self.__dbpool.runInteraction(self._conditional_insert, sql, item)
        query.addErrback(_handle_error)
        return item

    def _conditional_insert(self, txn, sql, item):
        """Run *sql* inside the pool-supplied transaction *txn*.

        *item* is the scraped item from the spider; note the item key is
        'img_urls' while the DB column is img_url.
        """
        params = (item['title'], item['img_urls'], item['union_code'])
        # Parameterized execute -- pymysql escapes the values, no SQL injection.
        txn.execute(sql, params)

    def __del__(self):
        # Best-effort cleanup: __del__ may run during interpreter shutdown,
        # so swallow any error rather than raise from a finalizer.
        try:
            self.__dbpool.close()
        except Exception as ex:
            print(ex)
