# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
from .settings import MYSQL_CONF
from .items import JdCrawlerItem, JdItemInfoItem


class JdCrawlerPipeline:
    """Persist search-result items (``JdCrawlerItem``) into the MySQL
    ``jd_search`` table.

    The connection is opened in :meth:`open_spider` and closed in
    :meth:`close_spider`, following the Scrapy pipeline lifecycle, so that
    constructing the pipeline never touches the database.
    """

    def open_spider(self, spider):
        # Open the connection when the crawl actually starts, not in
        # __init__, so instantiation stays side-effect free.
        self.conn = pymysql.connect(**MYSQL_CONF)
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert matching items into ``jd_search``.

        Items of other types are passed through unchanged so later
        pipelines still receive them.

        Raises:
            pymysql.MySQLError: re-raised after rolling back a failed insert.
        """
        if isinstance(item, JdCrawlerItem):
            adapter = ItemAdapter(item)
            try:
                # Parameterized query: values are escaped by the driver,
                # never interpolated into the SQL string.
                self.cursor.execute(
                    'INSERT INTO jd_search(img, price, name, shop, url) VALUES(%s,%s,%s,%s,%s)',
                    (adapter['img'], adapter['price'], adapter['name'],
                     adapter['shop'], adapter['url']))
                self.conn.commit()
            except pymysql.MySQLError:
                # Roll back so one bad row does not leave the connection
                # stuck in an aborted transaction for subsequent items.
                self.conn.rollback()
                raise
        return item

    def close_spider(self, spider):
        # Release DB resources when the spider finishes.
        self.cursor.close()
        self.conn.close()


class JdItemInfoPipeline:
    """Persist item-detail records (``JdItemInfoItem``) into the MySQL
    ``jd_item_info`` table.

    The connection is opened in :meth:`open_spider` and closed in
    :meth:`close_spider`, following the Scrapy pipeline lifecycle, so that
    constructing the pipeline never touches the database.
    """

    def open_spider(self, spider):
        # Open the connection when the crawl actually starts, not in
        # __init__, so instantiation stays side-effect free.
        self.conn = pymysql.connect(**MYSQL_CONF)
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert matching items into ``jd_item_info``.

        Items of other types are passed through unchanged so later
        pipelines still receive them.

        Raises:
            pymysql.MySQLError: re-raised after rolling back a failed insert.
        """
        if isinstance(item, JdItemInfoItem):
            adapter = ItemAdapter(item)
            try:
                # Parameterized query: values are escaped by the driver,
                # never interpolated into the SQL string.
                self.cursor.execute(
                    'INSERT INTO jd_item_info(url, name, details) VALUES(%s,%s,%s)',
                    (adapter['url'], adapter['name'], adapter['details']))
                self.conn.commit()
            except pymysql.MySQLError:
                # Roll back so one bad row does not leave the connection
                # stuck in an aborted transaction for subsequent items.
                self.conn.rollback()
                raise
        return item

    def close_spider(self, spider):
        # Release DB resources when the spider finishes.
        self.cursor.close()
        self.conn.close()
