#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html  
from scrapy import signals  
from scrapy import log
import json,codecs  
from CollectSpider.kancloud import db
from scrapy.xlib.pydispatch import dispatcher
from scrapy.utils.project import get_project_settings  
from scrapy.exceptions import DropItem
from CollectSpider.kancloud.MsgSend import MsgSend
from scrapy.contrib.exporter import JsonItemExporter
from CollectSpider.service.task import  update_status,update_time_interval
class FilterWordsPipeline(object):
    """A pipeline for filtering out items which contain certain words in their
    description.

    NOTE(review): the original filter condition was hard-coded to ``False``,
    which made the ``DropItem`` branch unreachable -- the filter is
    effectively disabled and every item passes through unchanged.  The dead
    branch is removed here without changing behavior.  To re-enable
    filtering, test each word in ``words_to_filter`` against the item's
    (lower-cased) description and ``raise DropItem(...)`` on a match.
    """

    # Words to screen for (all lowercase); currently unused -- see class note.
    words_to_filter = ['politics', 'religion', 'pilgrim']

    def process_item(self, item, spider):
        """Pass every item through unchanged (filtering is disabled)."""
        return item


# Pipeline that writes each scraped item to a JSON file, one object per line.
class JsonWirterFilePipeline(object):  
  
        def __init__(self): 
             
            self.file = codecs.open('fetch.json', 'w', encoding='utf-8')  
            dispatcher.connect(self.spider_closed, signals.spider_closed)
            dispatcher.connect(self.spider_error,signals.spider_error)
        def process_item(self, item, spider):  
            
            line = json.dumps(dict(item), ensure_ascii=False) + "\n"  
            self.file.write(line) 
            return item  
      
        def spider_closed(self, spider):
            #完成爬取后，修改数据库任务状态,为停止状态
            update_status(spider.task)
            #及时运行 中  单次运行,修改运行状态为 -1  表示执行过，不在执行
            update_time_interval(spider.task)
            #发送通知数据通知信息
            MsgSend.send_msg(**dict({"message":"已采集数据"+str(spider.total_count)+"条","taskid":spider.task["id"],"code":201,"extra":"数据采集成功数"}))
            print u"%s cloesd" % spider.name  
            
            #self.file.close()  
        
        def spider_error(self,spider):
            #spider error 处理
            update_status(spider.task)
            print u"%s error" % spider.name

class JsonExportPipeline(object):
    """Export items to ``<spider name>.json`` via Scrapy's JsonItemExporter.

    Fix: the exporter is now kept per-spider (keyed like ``self.files``)
    instead of in a single ``self.exporter`` attribute, which was silently
    clobbered when several spiders ran in the same process.
    """

    def __init__(self):
        log.msg('JsonExportPipeline.init....', level=log.INFO)
        self.files = {}      # spider -> open output file
        self.exporters = {}  # spider -> JsonItemExporter bound to that file

    @classmethod
    def from_crawler(cls, crawler):
        # Standard Scrapy factory: build the pipeline and wire lifecycle
        # signals through the crawler's signal manager.
        log.msg('JsonExportPipeline.from_crawler....', level=log.INFO)
        pipeline = cls()
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline

    def spider_opened(self, spider):
        # Open one output file per spider and start a dedicated exporter.
        log.msg('JsonExportPipeline.spider_opened....', level=log.INFO)
        out = open('%s.json' % spider.name, 'w+b')
        self.files[spider] = out
        exporter = JsonItemExporter(out)
        self.exporters[spider] = exporter
        exporter.start_exporting()

    def spider_closed(self, spider):
        # Finish this spider's export and release its file handle.
        log.msg('JsonExportPipeline.spider_closed....', level=log.INFO)
        exporter = self.exporters.pop(spider)
        exporter.finish_exporting()
        self.files.pop(spider).close()

    def process_item(self, item, spider):
        log.msg('JsonExportPipeline.process_item....', level=log.INFO)
        self.exporters[spider].export_item(item)
        return item
 
#
class CollectSpiderDBPipeLine(object):

    def __init__(self):
        dispatcher.connect(self.engine_opend, signals.engine_started)
        #dispatcher.connect(self.spider_closed, signals.spider_closed)

    
    def engine_opend(self):
        try:
            settings=get_project_settings()
            dbconfig = dict(
              host = settings['DB_HOST'],
              database = settings['DB_DBNAME'],
              port = settings['DB_PORT'],
              user = settings['DB_USER'],
              password = settings['DB_PASSWD'],
              charset = 'utf8',
              use_unicode = True,
            )
            
            db.create_engine(user=dbconfig["user"],password=dbconfig["password"],database=dbconfig["database"],host=dbconfig["host"],charset=dbconfig["charset"])
            print dbconfig

        except Exception,e:
            #print u"异常:->"+e
            print e
            
        
       
          
    def spider_closed(self,spider):
            #完成爬取后，修改数据库任务状态
        #update_status(spider.task)
        #print u"%s cloesd" % spider.name
        pass
    
    def process_item(self, item, spider):  
        try:
        #print item
            db.insert("spider_content_collect",**dict(item));
        except Exception,e:
            print e
        finally:
            return item  
    
    
