# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import pymysql
from myspider import settings


class MyspiderPipeline(object):
    """Scrapy item pipeline that stores scraped job items in a MySQL table.

    Connection parameters come from the project ``settings`` module
    (MYSQL_HOST, MYSQL_DBNAME, MYSQL_USER, MYSQL_PASSWD, MYSQL_TABLE,
    and optionally MYSQL_CLRDATA to truncate the table on startup).
    """

    def open_spider(self, spider):
        """Open the database connection when the spider starts.

        Connecting here (a standard Scrapy lifecycle hook) instead of at
        class-definition time means importing this module has no side
        effects and no connection is opened when the pipeline is disabled.
        """
        self.connect = pymysql.connect(
            host=settings.MYSQL_HOST,
            db=settings.MYSQL_DBNAME,
            user=settings.MYSQL_USER,
            password=settings.MYSQL_PASSWD,
            charset='utf8',
            use_unicode=True)
        # Cursor used for all subsequent INSERT statements.
        self.cursor = self.connect.cursor()
        # Optionally clear historical data before the crawl starts.
        # getattr with a False default keeps behavior unchanged when the
        # setting is absent (the original code had this commented out).
        if getattr(settings, 'MYSQL_CLRDATA', False):
            self.cursor.execute(
                "TRUNCATE TABLE %s;" % settings.MYSQL_TABLE)
            self.connect.commit()

    def process_item(self, item, spider):
        """Insert one scraped job item into MySQL and return it unchanged.

        Expects the item to provide 'name', 'wage', 'company' and 'info'
        keys (raises KeyError otherwise, which Scrapy reports per item).
        """
        # Parameterized query: the driver escapes the values, preventing
        # SQL injection and breakage on quotes/backslashes in scraped
        # text. The table name cannot be a bind parameter, but it comes
        # from the trusted project settings, not from scraped data.
        sql = ("INSERT INTO {0}(jobname, jobwage, jobcompany, jobinfo) "
               "VALUES (%s, %s, %s, %s);").format(settings.MYSQL_TABLE)
        self.cursor.execute(
            sql,
            (item['name'], item['wage'], item['company'], item['info']))
        self.connect.commit()  # persist this row immediately
        return item

    def close_spider(self, spider):
        """Release database resources: cursor first, then the connection."""
        self.cursor.close()
        self.connect.close()
