# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

import pymysql

class ComputerPipeline(object):
    """Scrapy item pipeline that persists crawled product items into MySQL.

    Lifecycle: ``open_spider`` opens the connection, ``process_item`` inserts
    one row per item (committing each insert), ``close_spider`` releases the
    cursor and connection.
    """

    # Parameterized INSERT: values are bound by the driver via %s placeholders,
    # which escapes quotes/backslashes correctly and prevents SQL injection.
    # Table/column identifiers must stay literal (they cannot be parameterized).
    _INSERT_SQL = (
        'INSERT INTO `拼多多洗衣液数据` '
        '(`爬取时间`, `商品ID`, `商品名称`, `商品价格`, `优惠券`, `优惠券数量`, '
        '`优惠后价格`, `销量`, `描述相符`, `服务态度`, `发货速度`, `商品链接`) '
        'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)'
    )

    def open_spider(self, spider):
        """Open the MySQL connection and cursor when the spider starts."""
        self.conn = pymysql.connect(host='127.0.0.1', user='root', passwd='pwd', db='pdd')
        self.cur = self.conn.cursor()

    def close_spider(self, spider):
        """Flush any pending transaction and release DB resources."""
        self.conn.commit()
        self.cur.close()
        self.conn.close()

    def process_item(self, item, spider):
        """Insert one crawled item as a row and return it unchanged.

        Raises KeyError if a required field is missing from the item, and
        propagates pymysql errors so Scrapy can surface failed inserts.
        """
        data = (
            item["crawl_time"],
            item["goodsID"],
            item["goodsName"],
            item["goodsPrice"],
            item["discount"],
            item["discount_num"],
            item["discount_price"],
            item["sell"],
            item["introduce"],
            item["attitude"],
            item["delivery_speed"],
            item["goodsUrl"],
        )
        # Pass values as the second argument so the driver quotes/escapes them;
        # the original %-formatting broke on quotes and allowed SQL injection.
        self.cur.execute(self._INSERT_SQL, data)
        self.conn.commit()
        return item
