# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
# from scrapy.conf import settings
import pymysql


class CrawljingdongPipeline(object):
    def __init__(self):
        # host = settings['MYSQL_HOST']
        # dbname = settings['MYSQL_DBNAME']
        # username = settings['MYSQL_USER']
        # password = settings['MYSQL_PASSWORD']
        host = '10.10.10.43'
        dbname = 'spider'
        username = 'root'
        password = 'yf123456'

        # Pass connection parameters by keyword: recent PyMySQL releases
        # no longer accept them positionally.
        self.db = pymysql.connect(host=host, user=username,
                                  password=password, database=dbname)
        self.cursor = self.db.cursor()
        self.index = 0  # number of rows inserted so far

    def process_item(self, item, spider):
        row = dict(item)
        # Use a parameterised query so the driver escapes the values,
        # instead of interpolating them directly into the SQL string.
        sql = ("insert into jd values (default, %(name)s, %(price)s, %(comment)s, "
               "%(shop)s, %(level_one)s, %(level_two)s, %(detail)s, %(url)s, %(img)s)")
        self.cursor.execute(sql, row)
        self.db.commit()
        self.index += 1
        print('insert', self.index)
        return item
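
    # Minimal cleanup sketch: close_spider is the standard Scrapy pipeline
    # hook called when the spider finishes, so the cursor and the MySQL
    # connection opened in __init__ are not left dangling.
    def close_spider(self, spider):
        self.cursor.close()
        self.db.close()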
