# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import pymysql
from itemadapter import ItemAdapter


class LpPipeline:
    """Default no-op pipeline: passes every scraped item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; hand the item straight to the next stage.
        return item


class MySQLPipeline:
    """Persist scraped job items into the MySQL table ``job_note`` via pymysql."""

    def open_spider(self, spider):
        """Read connection parameters from settings.py and open the DB connection.

        Expected settings: MYSQL_DB_HOST, MYSQL_DB_PORT, MYSQL_DB_NAME,
        MYSQL_DB_USER, MYSQL_DB_PASSWORD.
        """
        settings = spider.settings
        host = settings.get('MYSQL_DB_HOST')
        # Prefer the correctly spelled key, but fall back to the legacy
        # misspelled 'MYSQL_DB_POST' so existing settings files keep working.
        port = settings.get('MYSQL_DB_PORT', settings.get('MYSQL_DB_POST'))
        dbname = settings.get('MYSQL_DB_NAME')
        user = settings.get('MYSQL_DB_USER')
        pwd = settings.get('MYSQL_DB_PASSWORD')

        # pymysql requires an int port; settings may hold it as a string,
        # and fall back to MySQL's default 3306 when it is unset.
        port = int(port) if port else 3306

        # Open the database connection (utf8mb4 so non-ASCII job text survives).
        self.db_conn = pymysql.connect(host=host, port=port, db=dbname,
                                       user=user, password=pwd,
                                       charset='utf8mb4')

        # Open a cursor for the spider's lifetime.
        self.db_cur = self.db_conn.cursor()

    def process_item(self, item, spider):
        """Insert one job item; the parameterized query prevents SQL injection."""
        values = (item['company'],
                  item['job'],
                  item['address'],
                  item['salary'],
                  item['experience'],
                  item['education'])

        sql = """insert into job_note(company,job,address,salary,experience,education)
         values(%s,%s,%s,%s,%s,%s)"""
        self.db_cur.execute(sql, values)
        # Commit per item so a crash mid-crawl does not lose everything
        # (the original committed only once, in close_spider).
        self.db_conn.commit()
        return item

    def close_spider(self, spider):
        """Flush any pending work and always release the cursor/connection."""
        try:
            self.db_conn.commit()
        finally:
            self.db_cur.close()
            self.db_conn.close()
