# -*- coding: utf-8 -*-
import io
import os


# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
class GlobalTimesPipeline(object):
    """Scrapy item pipeline that persists each scraped news item to disk.

    Each item is written as UTF-8 text to ``<GLOBAL_TIMES_DATA_PATH>/<news_id>.txt``,
    where the directory comes from the spider's settings.
    """

    def process_item(self, item, spider):
        """Write the item's fields to a text file and pass the item on.

        :param item: scraped item providing the keys ``news_id``,
            ``headline``, ``news_link``, ``category`` and ``abstract``.
        :param spider: spider whose settings expose ``GLOBAL_TIMES_DATA_PATH``.
        :return: the unchanged item, so later pipelines still receive it.
        :raises OSError: if the storage directory cannot be created.
        """
        # Resolve the storage directory from the spider settings.
        save_path = spider.settings.get('GLOBAL_TIMES_DATA_PATH')
        abs_save_path = os.path.abspath(save_path)

        # makedirs inside try/except instead of isdir-then-mkdir: avoids the
        # check/create race between concurrent calls and also creates any
        # missing intermediate directories (plain mkdir would fail on those).
        try:
            os.makedirs(abs_save_path, 0o755)
        except OSError:
            if not os.path.isdir(abs_save_path):
                raise

        # io.open with an explicit encoding replaces the former
        # reload(sys)/sys.setdefaultencoding("utf-8") hack, which mutated
        # interpreter-global state and could break unrelated code.
        news_save_path = os.path.join(abs_save_path, item['news_id'] + '.txt')
        with io.open(news_save_path, "w", encoding="utf-8") as f:
            f.write(u"Headline:\n\t%s\nNews Link:\n\t%s\nCategory:\n\t%s\nAbstract:\n\t%s" % (
                item['headline'], item['news_link'],
                item['category'], item['abstract']))

        return item
