# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from pymongo import MongoClient
import pymysql


class DangdangDistributedPipeline:
    """Persist scraped items into MongoDB (database ``books``, collection ``dangdang``)."""

    def __init__(self):
        # NOTE(review): host/port are hard-coded; consider reading them from
        # Scrapy settings (e.g. via from_crawler) so deployments can differ.
        self.client = MongoClient(host='127.0.0.1', port=27017)
        self.collection = self.client['books']['dangdang']

    def process_item(self, item, spider):
        # Items must be converted to plain dicts before insertion.
        # Collection.save() was deprecated in pymongo 3.0 and removed in 4.0;
        # insert_one() is the supported replacement for inserting new documents.
        self.collection.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        # Release the MongoDB client when the spider finishes so the
        # connection pool does not leak across runs.
        self.client.close()


class DangdangDistributedMysqlPipeline:
    """Persist scraped items into the MySQL ``books`` table via pymysql."""

    def open_spider(self, spider):
        # Open the database connection once per spider run.
        # NOTE(review): credentials are hard-coded; consider moving them into
        # Scrapy settings / environment variables.
        self.db = pymysql.connect(host='localhost', user='root',
                                  password='010108', port=3306, db='dangdang')
        self.cursor = self.db.cursor()

    def process_item(self, item, spider):
        # Parameterized query: values are passed to execute() and escaped by
        # the driver — never interpolated into the SQL string.
        sql = ("insert into books"
               "(b_cate, m_cate, s_cate, s_href, book_img, book_name, book_price) "
               "values(%s,%s,%s,%s,%s,%s,%s)")
        data = (item['b_cate'], item['m_cate'], item['s_cate'], item['s_href'],
                item['book_img'], item['book_name'], item['book_price'])
        try:
            self.cursor.execute(sql, data)
            self.db.commit()
        except Exception as e:
            # Roll back the failed transaction, log, and still return the
            # item so downstream pipelines are not silently starved (the
            # original returned None on failure).
            self.db.rollback()
            spider.logger.error('插入表失败case:%s', e)
        return item

    def close_spider(self, spider):
        # Scrapy invokes close_spider(); the original method was named
        # spider_close, so it was never called and the connection leaked.
        self.cursor.close()
        self.db.close()
