# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.conf import settings
import logging
import os, time

import pymongo


class SegmentfaultPipeline(object):
    """Persist scraped Segmentfault items into a MongoDB collection.

    Connection parameters are read from the project settings:
    MONGODB_SERVER, MONGODB_PORT, MONGODB_DB and
    MONGODB_COLLECTION_segmentfault.
    """

    def __init__(self):
        # Connect once per pipeline instance; MongoClient keeps its own
        # connection pool, so no explicit close is needed here.
        connection = pymongo.MongoClient(
            settings['MONGODB_SERVER'],
            settings['MONGODB_PORT']
        )
        db = connection[settings['MONGODB_DB']]
        self.collection = db[settings['MONGODB_COLLECTION_segmentfault']]

    def process_item(self, item, spider):
        """Upsert the item keyed by its URL and pass it downstream.

        Fix: Collection.update() is deprecated and was removed in
        pymongo 4.x. replace_one() with upsert=True is the exact modern
        equivalent of the old full-document upsert: replace the existing
        document for this URL, or insert a new one.
        """
        self.collection.replace_one(
            {'url': item['url']}, dict(item), upsert=True
        )
        logging.debug("Question added to MongoDB database!")
        return item


class XsPipeline(object):
    """Write each scraped chapter to ./books/<book_name>/<chapter_id>_<chapter_name>.txt."""

    def process_item(self, item, spider):
        """Persist the chapter carried by *item* to disk and pass it on unchanged."""
        self.save_as_file(
            item.get('book_name'),
            item.get('chapter_id'),
            item.get('chapter_name'),
            item.get('chapter_content')
        )
        return item

    def save_as_file(self, book_name, chapter_id, chapter_name, chapter_content):
        """Save one chapter as a UTF-8 text file; never overwrite an existing file.

        Bug fix: the original wrote ``chapter_content.encode('utf-8')``
        (bytes) into a file opened in text mode, which raises TypeError
        on Python 3. Open the file with an explicit UTF-8 encoding and
        write the string directly instead.

        NOTE(review): book_name/chapter_name are interpolated into the
        path verbatim — assumes they contain no path separators; confirm
        against the spider that produces them.
        """
        book_path = os.path.join('.', 'books', book_name)
        # exist_ok avoids the race between an exists() check and makedirs()
        # when several chapters of the same book are processed concurrently.
        os.makedirs(book_path, exist_ok=True)

        file_name = os.path.join(
            book_path, '%s_%s.txt' % (chapter_id, chapter_name)
        )
        # Preserve the original "first write wins" behaviour.
        if not os.path.exists(file_name):
            with open(file_name, 'w', encoding='utf-8') as f:
                f.write(chapter_content)
