# -*- coding: utf-8 -*-

import json

import scrapy
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import NotConfigured
from scrapy.pipelines.images import ImagesPipeline

from doubanbook.mylib.mypymysql import mypymysql


# from scrapy.contrib.pipeline.images import ImagesPipeline


class DoubanbookPipeline(object):
    """Item pipeline that serializes each scraped item as one JSON object
    per line (JSON Lines) into the file named by the FILE_NAME setting.

    The pipeline is disabled (NotConfigured) when FILE_NAME is missing.
    """

    def __init__(self, file_name=None):
        # Raising NotConfigured tells Scrapy to skip this pipeline
        # instead of crashing the crawl.
        if file_name is None:
            raise NotConfigured
        self.file_name = file_name
        self.fp = None  # opened lazily in open_spider

    def open_spider(self, spider):
        """Open the output file when the spider starts.

        Explicit UTF-8 avoids platform-dependent default encodings,
        which matters for the Chinese text scraped from Douban.
        """
        self.fp = open(self.file_name, 'w', encoding='utf-8')

    def close_spider(self, spider):
        """Close the output file when the spider finishes."""
        # Guard against open_spider never having run (e.g. startup failure).
        if self.fp is not None:
            self.fp.close()
            self.fp = None

    def process_item(self, item, spider):
        """Write the item as a single JSON line and pass it downstream.

        ensure_ascii=False keeps non-ASCII text (Chinese titles/authors)
        human-readable instead of \\uXXXX escapes.
        """
        json_item = json.dumps(dict(item), ensure_ascii=False)
        self.fp.write(json_item + "\n")
        return item

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from crawler settings (FILE_NAME)."""
        file_name = crawler.settings.get('FILE_NAME')
        # file_name = scrapy.conf.settings['FILE_NAME']  # this also works to read settings
        return cls(file_name)


