# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql


class QcwyPipeline:
    """Default no-op pipeline: passes every item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; hand the item to the next stage.
        return item

class MySQLPipeline:
    """Persist scraped job items into a MySQL table via pymysql.

    Connection parameters are read from the Scrapy project settings:
    MYSQL_DB_HOST, MYSQL_DB_PORT (falls back to the legacy misspelled
    MYSQL_DB_POST, default 3306), MYSQL_DB_NAME, MYSQL_DB_USER and
    MYSQL_DB_PASSWORD. Each item is inserted as one row in ``Job_note``.
    """

    def open_spider(self, spider):
        """Open the database connection once when the spider starts."""
        settings = spider.settings
        host = settings.get('MYSQL_DB_HOST')
        # Use getint: pymysql requires an int port, and settings values
        # may arrive as strings. Prefer the correctly-spelled key but
        # keep the old misspelled MYSQL_DB_POST as a fallback so
        # existing settings files still work.
        port = settings.getint('MYSQL_DB_PORT',
                               settings.getint('MYSQL_DB_POST', 3306))
        dbname = settings.get('MYSQL_DB_NAME')
        user = settings.get('MYSQL_DB_USER')
        pwd = settings.get('MYSQL_DB_PASSWORD')
        # utf8mb4 avoids mojibake/encoding errors for non-ASCII job data.
        self.db_conn = pymysql.connect(host=host, port=port, db=dbname,
                                       user=user, password=pwd,
                                       charset='utf8mb4')
        self.db_cur = self.db_conn.cursor()

    def process_item(self, item, spider):
        """Insert one job record and commit immediately.

        Committing per item (instead of once in close_spider) means a
        crash mid-crawl does not discard everything scraped so far.
        """
        values = (item['company'],
                  item['job'],
                  item['address'],
                  item['salary'],
                  item['experience'],
                  item['education'])
        # Parameterized query: values are bound by the driver, never
        # interpolated into the SQL string.
        sql = "insert into Job_note(company, job, address, salary, experience, education) " \
              "values(%s, %s, %s, %s, %s, %s)"
        self.db_cur.execute(sql, values)
        self.db_conn.commit()
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes."""
        self.db_cur.close()
        self.db_conn.close()
