# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

import codecs
import json
import logging
import os

from spider.settings import DATA_DIR


class SpiderPipeline(object):
    """Default no-op pipeline: hands every item back unchanged."""

    def process_item(self, item, spider):
        """Scrapy per-item hook; return the item untouched so it
        continues down the pipeline chain."""
        return item


# Module-level logger, named after this module per the logging convention.
LOG = logging.getLogger(__name__)


class JsonWriterPipeline(object):
    """Write each scraped item as one JSON object per line (JSON Lines)
    to a file under DATA_DIR.

    The file is opened when the spider starts and closed when it
    finishes. Subclasses may override ``filemode`` to change how the
    file is opened (e.g. append instead of truncate).
    """

    # Open file handle; set by open_spider, cleared by close_spider.
    file = None
    # Mode used to open the output file; 'w' truncates any previous run.
    filemode = 'w'

    @staticmethod
    def get_filename(spider):
        """Return the output filename for *spider*.

        Uses the spider's ``custom_store_filename`` attribute when it is
        set, otherwise falls back to ``items-<spider name>.txt``.
        """
        # EAFP: one attribute lookup instead of hasattr + getattr.
        try:
            return spider.custom_store_filename
        except AttributeError:
            return 'items-%s.txt' % spider.name

    def get_filepath(self, spider):
        """Return the full path of the output file inside DATA_DIR."""
        return os.path.join(DATA_DIR, self.get_filename(spider))

    def open_file(self, spider):
        """Open the output file in UTF-8 text mode using ``filemode``."""
        filepath = self.get_filepath(spider)
        # Builtin open() with encoding= replaces the legacy codecs.open().
        self.file = open(filepath, self.filemode, encoding='utf-8')

    def open_spider(self, spider):
        """Scrapy hook: open the output file when the spider starts."""
        LOG.debug('JsonWriterPipeline.open_spider: %s', spider.name)
        self.open_file(spider)

    def close_spider(self, spider):
        """Scrapy hook: close the output file when the spider finishes."""
        LOG.debug('JsonWriterPipeline.close_spider: %s', spider.name)
        # Guard: open_spider may have failed before self.file was set,
        # and Scrapy calls close_spider regardless.
        if self.file is not None:
            self.file.close()
            self.file = None

    def process_item(self, item, spider):
        """Serialize *item* as a JSON line, write it, and return the item."""
        LOG.debug('JsonWriterPipeline.process_item: %s', spider.name)
        # ensure_ascii=False keeps non-ASCII text readable in the file.
        line = json.dumps(dict(item), ensure_ascii=False) + '\n'
        self.file.write(line)
        return item


class JsonAppenderPipeline(JsonWriterPipeline):
    """JsonWriterPipeline variant that appends to the output file
    across runs instead of overwriting it."""

    # 'a+' keeps existing lines and writes new items at the end.
    filemode = 'a+'
