# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
import pymysql
# import settings

class Day1701Pipeline(object):
    """Pipeline that appends each scraped item to ``job.text`` as JSON lines.

    One JSON object per line; the file is opened when the spider starts and
    closed when it finishes.
    """

    def __init__(self):
        # Opened once per spider run; utf-8 so non-ASCII job fields round-trip.
        self.f = open('job.text', 'w', encoding='utf-8')

    def process_item(self, item, spider):
        """Serialize *item* as one JSON line, then pass it on unchanged."""
        # ensure_ascii=False keeps CJK text readable instead of \uXXXX escapes,
        # matching the utf-8 encoding the file was opened with.
        self.f.write(json.dumps(dict(item), ensure_ascii=False) + '\n')
        return item

    def close_spider(self, spider):
        # Scrapy calls close_spider(spider); the previous
        # (self, response, spider) signature raised TypeError at shutdown
        # and leaked the open file handle.
        self.f.close()


class JobspiderPipeline(object):
    """Pipeline that persists each job item into the MySQL table ``lagou_copy1``."""

    # Column order of lagou_copy1; the leading ``id`` column is inserted as NULL
    # so MySQL auto-generates it.
    FIELDS = ("pname", "money", "location", "year", "degree", "ptype",
              "tags", "date_pub", "advantage", "jobdesc", "jobaddr", "company")

    def __init__(self):
        # NOTE(review): credentials are hard-coded; consider moving them to
        # Scrapy settings (the commented-out ``import settings`` hints at that).
        self.connect = pymysql.connect(
            host='localhost',
            port=3306,
            user='root',
            passwd='123456',
            db='pp',
            charset='utf8'
        )
        self.cursor = self.connect.cursor()

    def process_item(self, item, spider):
        """Insert *item* into lagou_copy1 and return it for later pipelines."""
        # Parameterized query: the previous %-formatted SQL broke on any quote
        # character in the scraped text and was open to SQL injection.
        placeholders = ",".join(["%s"] * len(self.FIELDS))
        sql = ("INSERT INTO lagou_copy1(id,pname,money,location,year,degree,"
               "ptype,tags,date_pub,advantage,jobdesc,jobaddr,company) "
               "VALUES (NULL," + placeholders + ")")
        self.cursor.execute(sql, tuple(item[f] for f in self.FIELDS))
        # Commit per item so a crash mid-run keeps already-scraped rows.
        self.connect.commit()
        # Must return the item or pipelines after this one receive None.
        return item

    def close_spider(self, spider):
        self.cursor.close()
        self.connect.close()

