# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

import pymysql
from xhs_spider.items import XhsSpiderItem,NoteItem

class XhsSpiderPipeline(object):
    """Scrapy pipeline that persists scraped items into a MySQL database.

    One pymysql connection is opened per spider run; each ``XhsSpiderItem``
    supplies its own INSERT statement via ``item.save_to_mysql()``.
    """

    def open_spider(self, spider):
        # Connect with keyword arguments: PyMySQL 1.0+ removed the old
        # positional (host, user, password, db) call signature, so the
        # original positional form raises TypeError on current versions.
        self.conn = pymysql.connect(
            host='127.0.0.1',
            user='root',
            password='123456',
            database='xhs',
            charset='utf8',
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        # Dispatch on item type and write to the matching table.
        # (A NoteItem branch using item.save_to_xhs_note() existed here
        # but was disabled; re-add it alongside this one if needed.)
        if isinstance(item, XhsSpiderItem):
            sql, data = item.save_to_mysql()
            try:
                # Parameterized execute — data is bound server-side,
                # never interpolated into the SQL string.
                self.cursor.execute(sql, data)
                self.conn.commit()
                print('--------信息写入成功--------')
            except Exception as e:
                # Roll back the failed transaction so the connection is
                # usable for the next item instead of staying wedged.
                self.conn.rollback()
                print('写入失败,原因%s' % e)
        # Always return the item so downstream pipelines receive it; the
        # original returned None for non-matching items, silently dropping
        # them from the rest of the pipeline chain.
        return item

    def close_spider(self, spider):
        # Release DB resources when the spider finishes.
        self.cursor.close()
        self.conn.close()

