# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
from girl.items.city import *
from datetime import *
import importlib
from dbShell import conf
from pprint import pprint


class pipeline(object):
    """Scrapy item pipeline: routes scraped items into per-type MongoDB
    collections and, when the spider closes, validates scrape counts and
    writes a job summary record to the ``spiderjob`` collection."""

    def __init__(self):
        # NOTE(review): credentials and host are hard-coded — move them to
        # Scrapy settings / environment configuration.
        self.client = pymongo.MongoClient("47.75.39.50", socketKeepAlive=True, maxPoolSize=200)
        self.client.admin.authenticate("admin321", "dsf::6666,,<<", mechanism='SCRAM-SHA-1')

    def process_item(self, item, spider):
        """Insert *item* into the collection matching its item class.

        The target database name comes from the per-spider config object
        ``getattr(conf, spider.name)['mongoDb']``.

        Returns the item so that later pipelines keep receiving it
        (the original returned ``None``, which hands ``None`` downstream).
        """
        self.config = getattr(conf, spider.name)
        self.db = self.client[self.config['mongoDb']]
        if isinstance(item, storesItem):
            self.db.store.insert(dict(item))
        elif isinstance(item, diaryItem):
            self.db.diary.insert(dict(item))
        elif isinstance(item, girlItem):
            self.db.girl.insert(dict(item))
        elif isinstance(item, movieItem):
            self.db.movie.insert(dict(item))
        elif isinstance(item, emailItem):
            self.db.email.insert(dict(item))
        return item

    def close_spider(self, spider):
        """Validate the run's counts, run the post-processing shells, and
        record a job summary document.

        Fixes over the original:
        * ``msg`` was unbound on the success path, raising ``NameError``
          at the final insert.
        * the Mongo client was closed *before* the final insert; it is now
          closed only after the job record is written.
        * ``e.message`` is Python-2-only; fall back to ``str(e)``.
        * stats values may be ``None`` (e.g. no items scraped), which made
          the comparisons raise ``TypeError``; default them to 0.
        """
        self.config = getattr(conf, spider.name)
        self.job = self.client[self.config['mongoDb']]["spiderjob"]
        stats = spider.crawler.stats
        tdata = stats.get_value('item_scraped_count') or 0  # items actually scraped
        sdata = stats.get_value('sdata') or 0               # expected count from source site
        error = stats.get_value('log_count/ERROR')
        stime = stats.get_value('start_time')
        ftime = stats.get_value('finish_time')

        msg = ''  # default: no problem detected
        # Fewer than 10 valid items scraped -> report an error.
        if tdata < 10:
            msg = '%s itemNum too little tdata:%s' % (spider.name, tdata)
        # More than 100 items of drift vs. the source site -> report an error.
        elif abs(sdata - tdata) > 100:
            msg = '%s itemNum too difference sdata:%s tdata:%s' % (spider.name, sdata, tdata)
        else:
            try:
                for shell in ["newstore", "newgirl", "newdiary", "newmovie"]:
                    # NOTE(review): a module object is not callable — this
                    # presumably relies on a callable exported by
                    # girl.dbShell.city.<shell>.main; confirm against that
                    # package before relying on this branch.
                    obj = importlib.import_module("girl.dbShell.city.%s.main" % shell)(spider.name)
                    obj()
            except Exception as e:
                msg = getattr(e, 'message', str(e))

        self.job.insert({"date": str(date.today()), "sdata": sdata, "tdata": tdata,
                         "name": spider.name, "start_time": stime, "error": error,
                         "msg": msg, "end_time": ftime})
        # Close the connection only after the final job record is written.
        self.client.close()
        return
